mirror of
https://github.com/yyhuni/xingrin.git
synced 2026-01-31 19:53:11 +08:00
Compare commits
14 Commits
v1.3.13-de
...
v1.3.16-de
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d1ec9b7f27 | ||
|
|
2a3d9b4446 | ||
|
|
9b63203b5a | ||
|
|
4c1282e9bb | ||
|
|
ba3a9b709d | ||
|
|
283b28b46a | ||
|
|
1269e5a314 | ||
|
|
802e967906 | ||
|
|
e446326416 | ||
|
|
e0abb3ce7b | ||
|
|
d418baaf79 | ||
|
|
f8da408580 | ||
|
|
7b7bbed634 | ||
|
|
08372588a4 |
@@ -25,6 +25,13 @@
|
||||
|
||||
---
|
||||
|
||||
## 🌐 在线 Demo
|
||||
|
||||
👉 **[https://xingrin.vercel.app/](https://xingrin.vercel.app/)**
|
||||
|
||||
> ⚠️ 仅用于 UI 展示,未接入后端数据库
|
||||
|
||||
---
|
||||
|
||||
<p align="center">
|
||||
<b>🎨 现代化 UI </b>
|
||||
@@ -247,6 +254,7 @@ sudo ./uninstall.sh
|
||||
|
||||
## 📧 联系
|
||||
- 微信公众号: **塔罗安全学苑**
|
||||
- 微信群去公众号底下的菜单,有个交流群,点击就可以看到了,链接过期可以私信我拉你
|
||||
|
||||
<img src="docs/wechat-qrcode.png" alt="微信公众号" width="200">
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Generated by Django 5.2.7 on 2026-01-02 04:45
|
||||
# Generated by Django 5.2.7 on 2026-01-06 00:55
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.contrib.postgres.indexes
|
||||
|
||||
@@ -1,196 +0,0 @@
|
||||
"""
|
||||
创建资产搜索 IMMV(增量维护物化视图)
|
||||
|
||||
使用 pg_ivm 扩展创建 IMMV,数据变更时自动增量更新,无需手动刷新。
|
||||
|
||||
包含:
|
||||
1. asset_search_view - Website 搜索视图
|
||||
2. endpoint_search_view - Endpoint 搜索视图
|
||||
|
||||
重要限制:
|
||||
⚠️ pg_ivm 不支持数组类型字段(ArrayField),因为其使用 anyarray 伪类型进行比较时,
|
||||
PostgreSQL 无法确定空数组的元素类型,导致错误:
|
||||
"cannot determine element type of \"anyarray\" argument"
|
||||
|
||||
因此,所有 ArrayField 字段(tech, matched_gf_patterns 等)已从 IMMV 中移除,
|
||||
搜索时通过 JOIN 原表获取。
|
||||
|
||||
如需添加新的数组字段,请:
|
||||
1. 不要将其包含在 IMMV 视图中
|
||||
2. 在搜索服务中通过 JOIN 原表获取
|
||||
"""
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('asset', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# 1. 确保 pg_trgm 扩展已启用(用于文本模糊搜索索引)
|
||||
migrations.RunSQL(
|
||||
sql="CREATE EXTENSION IF NOT EXISTS pg_trgm;",
|
||||
reverse_sql="-- pg_trgm extension kept for other uses"
|
||||
),
|
||||
|
||||
# 2. 确保 pg_ivm 扩展已启用(用于 IMMV 增量维护)
|
||||
migrations.RunSQL(
|
||||
sql="CREATE EXTENSION IF NOT EXISTS pg_ivm;",
|
||||
reverse_sql="-- pg_ivm extension kept for other uses"
|
||||
),
|
||||
|
||||
# ==================== Website IMMV ====================
|
||||
|
||||
# 2. 创建 asset_search_view IMMV
|
||||
# ⚠️ 注意:不包含 w.tech 数组字段,pg_ivm 不支持 ArrayField
|
||||
# 数组字段通过 search_service.py 中 JOIN website 表获取
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
SELECT pgivm.create_immv('asset_search_view', $$
|
||||
SELECT
|
||||
w.id,
|
||||
w.url,
|
||||
w.host,
|
||||
w.title,
|
||||
w.status_code,
|
||||
w.response_headers,
|
||||
w.response_body,
|
||||
w.content_type,
|
||||
w.content_length,
|
||||
w.webserver,
|
||||
w.location,
|
||||
w.vhost,
|
||||
w.created_at,
|
||||
w.target_id
|
||||
FROM website w
|
||||
$$);
|
||||
""",
|
||||
reverse_sql="SELECT pgivm.drop_immv('asset_search_view');"
|
||||
),
|
||||
|
||||
# 3. 创建 asset_search_view 索引
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- 唯一索引
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS asset_search_view_id_idx
|
||||
ON asset_search_view (id);
|
||||
|
||||
-- host 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_host_trgm_idx
|
||||
ON asset_search_view USING gin (host gin_trgm_ops);
|
||||
|
||||
-- title 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_title_trgm_idx
|
||||
ON asset_search_view USING gin (title gin_trgm_ops);
|
||||
|
||||
-- url 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_url_trgm_idx
|
||||
ON asset_search_view USING gin (url gin_trgm_ops);
|
||||
|
||||
-- response_headers 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_headers_trgm_idx
|
||||
ON asset_search_view USING gin (response_headers gin_trgm_ops);
|
||||
|
||||
-- response_body 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_body_trgm_idx
|
||||
ON asset_search_view USING gin (response_body gin_trgm_ops);
|
||||
|
||||
-- status_code 索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_status_idx
|
||||
ON asset_search_view (status_code);
|
||||
|
||||
-- created_at 排序索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_created_idx
|
||||
ON asset_search_view (created_at DESC);
|
||||
""",
|
||||
reverse_sql="""
|
||||
DROP INDEX IF EXISTS asset_search_view_id_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_host_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_title_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_url_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_headers_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_body_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_status_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_created_idx;
|
||||
"""
|
||||
),
|
||||
|
||||
# ==================== Endpoint IMMV ====================
|
||||
|
||||
# 4. 创建 endpoint_search_view IMMV
|
||||
# ⚠️ 注意:不包含 e.tech 和 e.matched_gf_patterns 数组字段,pg_ivm 不支持 ArrayField
|
||||
# 数组字段通过 search_service.py 中 JOIN endpoint 表获取
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
SELECT pgivm.create_immv('endpoint_search_view', $$
|
||||
SELECT
|
||||
e.id,
|
||||
e.url,
|
||||
e.host,
|
||||
e.title,
|
||||
e.status_code,
|
||||
e.response_headers,
|
||||
e.response_body,
|
||||
e.content_type,
|
||||
e.content_length,
|
||||
e.webserver,
|
||||
e.location,
|
||||
e.vhost,
|
||||
e.created_at,
|
||||
e.target_id
|
||||
FROM endpoint e
|
||||
$$);
|
||||
""",
|
||||
reverse_sql="SELECT pgivm.drop_immv('endpoint_search_view');"
|
||||
),
|
||||
|
||||
# 5. 创建 endpoint_search_view 索引
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- 唯一索引
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS endpoint_search_view_id_idx
|
||||
ON endpoint_search_view (id);
|
||||
|
||||
-- host 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_host_trgm_idx
|
||||
ON endpoint_search_view USING gin (host gin_trgm_ops);
|
||||
|
||||
-- title 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_title_trgm_idx
|
||||
ON endpoint_search_view USING gin (title gin_trgm_ops);
|
||||
|
||||
-- url 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_url_trgm_idx
|
||||
ON endpoint_search_view USING gin (url gin_trgm_ops);
|
||||
|
||||
-- response_headers 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_headers_trgm_idx
|
||||
ON endpoint_search_view USING gin (response_headers gin_trgm_ops);
|
||||
|
||||
-- response_body 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_body_trgm_idx
|
||||
ON endpoint_search_view USING gin (response_body gin_trgm_ops);
|
||||
|
||||
-- status_code 索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_status_idx
|
||||
ON endpoint_search_view (status_code);
|
||||
|
||||
-- created_at 排序索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_created_idx
|
||||
ON endpoint_search_view (created_at DESC);
|
||||
""",
|
||||
reverse_sql="""
|
||||
DROP INDEX IF EXISTS endpoint_search_view_id_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_host_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_title_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_url_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_headers_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_body_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_status_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_created_idx;
|
||||
"""
|
||||
),
|
||||
]
|
||||
34
backend/apps/common/migrations/0001_initial.py
Normal file
34
backend/apps/common/migrations/0001_initial.py
Normal file
@@ -0,0 +1,34 @@
|
||||
# Generated by Django 5.2.7 on 2026-01-06 00:55
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
('targets', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='BlacklistRule',
|
||||
fields=[
|
||||
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||
('pattern', models.CharField(help_text='规则模式,如 *.gov, 10.0.0.0/8, 192.168.1.1', max_length=255)),
|
||||
('rule_type', models.CharField(choices=[('domain', '域名'), ('ip', 'IP地址'), ('cidr', 'CIDR范围'), ('keyword', '关键词')], help_text='规则类型:domain, ip, cidr', max_length=20)),
|
||||
('scope', models.CharField(choices=[('global', '全局规则'), ('target', 'Target规则')], db_index=True, help_text='作用域:global 或 target', max_length=20)),
|
||||
('description', models.CharField(blank=True, default='', help_text='规则描述', max_length=500)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('target', models.ForeignKey(blank=True, help_text='关联的 Target(仅 scope=target 时有值)', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='blacklist_rules', to='targets.target')),
|
||||
],
|
||||
options={
|
||||
'db_table': 'blacklist_rule',
|
||||
'ordering': ['-created_at'],
|
||||
'indexes': [models.Index(fields=['scope', 'rule_type'], name='blacklist_r_scope_6ff77f_idx'), models.Index(fields=['target', 'scope'], name='blacklist_r_target__191441_idx')],
|
||||
'constraints': [models.UniqueConstraint(fields=('pattern', 'scope', 'target'), name='unique_blacklist_rule')],
|
||||
},
|
||||
),
|
||||
]
|
||||
0
backend/apps/common/migrations/__init__.py
Normal file
0
backend/apps/common/migrations/__init__.py
Normal file
4
backend/apps/common/models/__init__.py
Normal file
4
backend/apps/common/models/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Common models"""
|
||||
from apps.common.models.blacklist import BlacklistRule
|
||||
|
||||
__all__ = ['BlacklistRule']
|
||||
71
backend/apps/common/models/blacklist.py
Normal file
71
backend/apps/common/models/blacklist.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""黑名单规则模型"""
|
||||
from django.db import models
|
||||
|
||||
|
||||
class BlacklistRule(models.Model):
|
||||
"""黑名单规则模型
|
||||
|
||||
用于存储黑名单过滤规则,支持域名、IP、CIDR 三种类型。
|
||||
支持两层作用域:全局规则和 Target 级规则。
|
||||
"""
|
||||
|
||||
class RuleType(models.TextChoices):
|
||||
DOMAIN = 'domain', '域名'
|
||||
IP = 'ip', 'IP地址'
|
||||
CIDR = 'cidr', 'CIDR范围'
|
||||
KEYWORD = 'keyword', '关键词'
|
||||
|
||||
class Scope(models.TextChoices):
|
||||
GLOBAL = 'global', '全局规则'
|
||||
TARGET = 'target', 'Target规则'
|
||||
|
||||
id = models.AutoField(primary_key=True)
|
||||
pattern = models.CharField(
|
||||
max_length=255,
|
||||
help_text='规则模式,如 *.gov, 10.0.0.0/8, 192.168.1.1'
|
||||
)
|
||||
rule_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=RuleType.choices,
|
||||
help_text='规则类型:domain, ip, cidr'
|
||||
)
|
||||
scope = models.CharField(
|
||||
max_length=20,
|
||||
choices=Scope.choices,
|
||||
db_index=True,
|
||||
help_text='作用域:global 或 target'
|
||||
)
|
||||
target = models.ForeignKey(
|
||||
'targets.Target',
|
||||
on_delete=models.CASCADE,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='blacklist_rules',
|
||||
help_text='关联的 Target(仅 scope=target 时有值)'
|
||||
)
|
||||
description = models.CharField(
|
||||
max_length=500,
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='规则描述'
|
||||
)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'blacklist_rule'
|
||||
indexes = [
|
||||
models.Index(fields=['scope', 'rule_type']),
|
||||
models.Index(fields=['target', 'scope']),
|
||||
]
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=['pattern', 'scope', 'target'],
|
||||
name='unique_blacklist_rule'
|
||||
),
|
||||
]
|
||||
ordering = ['-created_at']
|
||||
|
||||
def __str__(self):
|
||||
if self.scope == self.Scope.TARGET and self.target:
|
||||
return f"[{self.scope}:{self.target_id}] {self.pattern}"
|
||||
return f"[{self.scope}] {self.pattern}"
|
||||
12
backend/apps/common/serializers/__init__.py
Normal file
12
backend/apps/common/serializers/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""Common serializers"""
|
||||
from .blacklist_serializers import (
|
||||
BlacklistRuleSerializer,
|
||||
GlobalBlacklistRuleSerializer,
|
||||
TargetBlacklistRuleSerializer,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'BlacklistRuleSerializer',
|
||||
'GlobalBlacklistRuleSerializer',
|
||||
'TargetBlacklistRuleSerializer',
|
||||
]
|
||||
68
backend/apps/common/serializers/blacklist_serializers.py
Normal file
68
backend/apps/common/serializers/blacklist_serializers.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""黑名单规则序列化器"""
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.common.models import BlacklistRule
|
||||
from apps.common.utils import detect_rule_type
|
||||
|
||||
|
||||
class BlacklistRuleSerializer(serializers.ModelSerializer):
|
||||
"""黑名单规则序列化器"""
|
||||
|
||||
class Meta:
|
||||
model = BlacklistRule
|
||||
fields = [
|
||||
'id',
|
||||
'pattern',
|
||||
'rule_type',
|
||||
'scope',
|
||||
'target',
|
||||
'description',
|
||||
'created_at',
|
||||
]
|
||||
read_only_fields = ['id', 'rule_type', 'created_at']
|
||||
|
||||
def validate_pattern(self, value):
|
||||
"""验证规则模式"""
|
||||
if not value or not value.strip():
|
||||
raise serializers.ValidationError("规则模式不能为空")
|
||||
return value.strip()
|
||||
|
||||
def create(self, validated_data):
|
||||
"""创建规则时自动识别规则类型"""
|
||||
pattern = validated_data.get('pattern', '')
|
||||
validated_data['rule_type'] = detect_rule_type(pattern)
|
||||
return super().create(validated_data)
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
"""更新规则时重新识别规则类型"""
|
||||
if 'pattern' in validated_data:
|
||||
pattern = validated_data['pattern']
|
||||
validated_data['rule_type'] = detect_rule_type(pattern)
|
||||
return super().update(instance, validated_data)
|
||||
|
||||
|
||||
class GlobalBlacklistRuleSerializer(BlacklistRuleSerializer):
|
||||
"""全局黑名单规则序列化器"""
|
||||
|
||||
class Meta(BlacklistRuleSerializer.Meta):
|
||||
fields = ['id', 'pattern', 'rule_type', 'description', 'created_at']
|
||||
read_only_fields = ['id', 'rule_type', 'created_at']
|
||||
|
||||
def create(self, validated_data):
|
||||
"""创建全局规则"""
|
||||
validated_data['scope'] = BlacklistRule.Scope.GLOBAL
|
||||
validated_data['target'] = None
|
||||
return super().create(validated_data)
|
||||
|
||||
|
||||
class TargetBlacklistRuleSerializer(BlacklistRuleSerializer):
|
||||
"""Target 黑名单规则序列化器"""
|
||||
|
||||
class Meta(BlacklistRuleSerializer.Meta):
|
||||
fields = ['id', 'pattern', 'rule_type', 'description', 'created_at']
|
||||
read_only_fields = ['id', 'rule_type', 'created_at']
|
||||
|
||||
def create(self, validated_data):
|
||||
"""创建 Target 规则(target_id 由 view 设置)"""
|
||||
validated_data['scope'] = BlacklistRule.Scope.TARGET
|
||||
return super().create(validated_data)
|
||||
@@ -3,13 +3,16 @@
|
||||
|
||||
提供系统级别的公共服务,包括:
|
||||
- SystemLogService: 系统日志读取服务
|
||||
- BlacklistService: 黑名单过滤服务
|
||||
|
||||
注意:FilterService 已移至 apps.common.utils.filter_utils
|
||||
推荐使用: from apps.common.utils.filter_utils import apply_filters
|
||||
"""
|
||||
|
||||
from .system_log_service import SystemLogService
|
||||
from .blacklist_service import BlacklistService
|
||||
|
||||
__all__ = [
|
||||
'SystemLogService',
|
||||
'BlacklistService',
|
||||
]
|
||||
|
||||
176
backend/apps/common/services/blacklist_service.py
Normal file
176
backend/apps/common/services/blacklist_service.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
黑名单规则管理服务
|
||||
|
||||
负责黑名单规则的 CRUD 操作(数据库层面)。
|
||||
过滤逻辑请使用 apps.common.utils.BlacklistFilter。
|
||||
|
||||
架构说明:
|
||||
- Model: BlacklistRule (apps.common.models.blacklist)
|
||||
- Service: BlacklistService (本文件) - 规则 CRUD
|
||||
- Utils: BlacklistFilter (apps.common.utils.blacklist_filter) - 过滤逻辑
|
||||
- View: GlobalBlacklistView, TargetViewSet.blacklist
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Dict, Any, Optional
|
||||
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from apps.common.utils import detect_rule_type
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _normalize_patterns(patterns: List[str]) -> List[str]:
|
||||
"""
|
||||
规范化规则列表:去重 + 过滤空行
|
||||
|
||||
Args:
|
||||
patterns: 原始规则列表
|
||||
|
||||
Returns:
|
||||
List[str]: 去重后的规则列表(保持顺序)
|
||||
"""
|
||||
return list(dict.fromkeys(filter(None, (p.strip() for p in patterns))))
|
||||
|
||||
|
||||
class BlacklistService:
|
||||
"""
|
||||
黑名单规则管理服务
|
||||
|
||||
只负责规则的 CRUD 操作,不包含过滤逻辑。
|
||||
过滤逻辑请使用 BlacklistFilter 工具类。
|
||||
"""
|
||||
|
||||
def get_global_rules(self) -> QuerySet:
|
||||
"""
|
||||
获取全局黑名单规则列表
|
||||
|
||||
Returns:
|
||||
QuerySet: 全局规则查询集
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
return BlacklistRule.objects.filter(scope=BlacklistRule.Scope.GLOBAL)
|
||||
|
||||
def get_target_rules(self, target_id: int) -> QuerySet:
|
||||
"""
|
||||
获取 Target 级黑名单规则列表
|
||||
|
||||
Args:
|
||||
target_id: Target ID
|
||||
|
||||
Returns:
|
||||
QuerySet: Target 级规则查询集
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
return BlacklistRule.objects.filter(
|
||||
scope=BlacklistRule.Scope.TARGET,
|
||||
target_id=target_id
|
||||
)
|
||||
|
||||
def get_rules(self, target_id: Optional[int] = None) -> List:
|
||||
"""
|
||||
获取黑名单规则(全局 + Target 级)
|
||||
|
||||
Args:
|
||||
target_id: Target ID,用于加载 Target 级规则
|
||||
|
||||
Returns:
|
||||
List[BlacklistRule]: 规则列表
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
|
||||
# 加载全局规则
|
||||
rules = list(BlacklistRule.objects.filter(scope=BlacklistRule.Scope.GLOBAL))
|
||||
|
||||
# 加载 Target 级规则
|
||||
if target_id:
|
||||
target_rules = BlacklistRule.objects.filter(
|
||||
scope=BlacklistRule.Scope.TARGET,
|
||||
target_id=target_id
|
||||
)
|
||||
rules.extend(target_rules)
|
||||
|
||||
return rules
|
||||
|
||||
def replace_global_rules(self, patterns: List[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
全量替换全局黑名单规则(PUT 语义)
|
||||
|
||||
Args:
|
||||
patterns: 新的规则模式列表
|
||||
|
||||
Returns:
|
||||
Dict: {'count': int} 最终规则数量
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
|
||||
count = self._replace_rules(
|
||||
patterns=patterns,
|
||||
scope=BlacklistRule.Scope.GLOBAL,
|
||||
target=None
|
||||
)
|
||||
|
||||
logger.info("全量替换全局黑名单规则: %d 条", count)
|
||||
return {'count': count}
|
||||
|
||||
def replace_target_rules(self, target, patterns: List[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
全量替换 Target 级黑名单规则(PUT 语义)
|
||||
|
||||
Args:
|
||||
target: Target 对象
|
||||
patterns: 新的规则模式列表
|
||||
|
||||
Returns:
|
||||
Dict: {'count': int} 最终规则数量
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
|
||||
count = self._replace_rules(
|
||||
patterns=patterns,
|
||||
scope=BlacklistRule.Scope.TARGET,
|
||||
target=target
|
||||
)
|
||||
|
||||
logger.info("全量替换 Target 黑名单规则: %d 条 (Target: %s)", count, target.name)
|
||||
return {'count': count}
|
||||
|
||||
def _replace_rules(self, patterns: List[str], scope: str, target=None) -> int:
|
||||
"""
|
||||
内部方法:全量替换规则
|
||||
|
||||
Args:
|
||||
patterns: 规则模式列表
|
||||
scope: 规则作用域 (GLOBAL/TARGET)
|
||||
target: Target 对象(仅 TARGET 作用域需要)
|
||||
|
||||
Returns:
|
||||
int: 最终规则数量
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
from django.db import transaction
|
||||
|
||||
patterns = _normalize_patterns(patterns)
|
||||
|
||||
with transaction.atomic():
|
||||
# 1. 删除旧规则
|
||||
delete_filter = {'scope': scope}
|
||||
if target:
|
||||
delete_filter['target'] = target
|
||||
BlacklistRule.objects.filter(**delete_filter).delete()
|
||||
|
||||
# 2. 创建新规则
|
||||
if patterns:
|
||||
rules = [
|
||||
BlacklistRule(
|
||||
pattern=pattern,
|
||||
rule_type=detect_rule_type(pattern),
|
||||
scope=scope,
|
||||
target=target
|
||||
)
|
||||
for pattern in patterns
|
||||
]
|
||||
BlacklistRule.objects.bulk_create(rules)
|
||||
|
||||
return len(patterns)
|
||||
@@ -2,13 +2,19 @@
|
||||
通用模块 URL 配置
|
||||
|
||||
路由说明:
|
||||
- /api/health/ 健康检查接口(无需认证)
|
||||
- /api/auth/* 认证相关接口(登录、登出、用户信息)
|
||||
- /api/system/* 系统管理接口(日志查看等)
|
||||
- /api/health/ 健康检查接口(无需认证)
|
||||
- /api/auth/* 认证相关接口(登录、登出、用户信息)
|
||||
- /api/system/* 系统管理接口(日志查看等)
|
||||
- /api/blacklist/* 黑名单管理接口
|
||||
"""
|
||||
|
||||
from django.urls import path
|
||||
from .views import LoginView, LogoutView, MeView, ChangePasswordView, SystemLogsView, SystemLogFilesView, HealthCheckView
|
||||
|
||||
from .views import (
|
||||
LoginView, LogoutView, MeView, ChangePasswordView,
|
||||
SystemLogsView, SystemLogFilesView, HealthCheckView,
|
||||
GlobalBlacklistView,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
# 健康检查(无需认证)
|
||||
@@ -23,4 +29,7 @@ urlpatterns = [
|
||||
# 系统管理
|
||||
path('system/logs/', SystemLogsView.as_view(), name='system-logs'),
|
||||
path('system/logs/files/', SystemLogFilesView.as_view(), name='system-log-files'),
|
||||
|
||||
# 黑名单管理(PUT 全量替换模式)
|
||||
path('blacklist/rules/', GlobalBlacklistView.as_view(), name='blacklist-rules'),
|
||||
]
|
||||
|
||||
@@ -14,6 +14,11 @@ from .csv_utils import (
|
||||
create_csv_export_response,
|
||||
UTF8_BOM,
|
||||
)
|
||||
from .blacklist_filter import (
|
||||
BlacklistFilter,
|
||||
detect_rule_type,
|
||||
extract_host,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'deduplicate_for_bulk',
|
||||
@@ -27,4 +32,7 @@ __all__ = [
|
||||
'format_datetime',
|
||||
'create_csv_export_response',
|
||||
'UTF8_BOM',
|
||||
'BlacklistFilter',
|
||||
'detect_rule_type',
|
||||
'extract_host',
|
||||
]
|
||||
|
||||
246
backend/apps/common/utils/blacklist_filter.py
Normal file
246
backend/apps/common/utils/blacklist_filter.py
Normal file
@@ -0,0 +1,246 @@
|
||||
"""
|
||||
黑名单过滤工具
|
||||
|
||||
提供域名、IP、CIDR、关键词的黑名单匹配功能。
|
||||
纯工具类,不涉及数据库操作。
|
||||
|
||||
支持的规则类型:
|
||||
1. 域名精确匹配: example.com
|
||||
- 规则: example.com
|
||||
- 匹配: example.com
|
||||
- 不匹配: sub.example.com, other.com
|
||||
|
||||
2. 域名后缀匹配: *.example.com
|
||||
- 规则: *.example.com
|
||||
- 匹配: sub.example.com, a.b.example.com, example.com
|
||||
- 不匹配: other.com, example.com.cn
|
||||
|
||||
3. 关键词匹配: *cdn*
|
||||
- 规则: *cdn*
|
||||
- 匹配: cdn.example.com, a.cdn.b.com, mycdn123.com
|
||||
- 不匹配: example.com (不包含 cdn)
|
||||
|
||||
4. IP 精确匹配: 192.168.1.1
|
||||
- 规则: 192.168.1.1
|
||||
- 匹配: 192.168.1.1
|
||||
- 不匹配: 192.168.1.2
|
||||
|
||||
5. CIDR 范围匹配: 192.168.0.0/24
|
||||
- 规则: 192.168.0.0/24
|
||||
- 匹配: 192.168.0.1, 192.168.0.255
|
||||
- 不匹配: 192.168.1.1
|
||||
|
||||
使用方式:
|
||||
from apps.common.utils import BlacklistFilter
|
||||
|
||||
# 创建过滤器(传入规则列表)
|
||||
rules = BlacklistRule.objects.filter(...)
|
||||
filter = BlacklistFilter(rules)
|
||||
|
||||
# 检查单个目标
|
||||
if filter.is_allowed('http://example.com'):
|
||||
process(url)
|
||||
|
||||
# 流式处理
|
||||
for url in urls:
|
||||
if filter.is_allowed(url):
|
||||
process(url)
|
||||
"""
|
||||
|
||||
import ipaddress
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from apps.common.validators import is_valid_ip, validate_cidr
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def detect_rule_type(pattern: str) -> str:
|
||||
"""
|
||||
自动识别规则类型
|
||||
|
||||
支持的模式:
|
||||
- 域名精确匹配: example.com
|
||||
- 域名后缀匹配: *.example.com
|
||||
- 关键词匹配: *cdn* (匹配包含 cdn 的域名)
|
||||
- IP 精确匹配: 192.168.1.1
|
||||
- CIDR 范围: 192.168.0.0/24
|
||||
|
||||
Args:
|
||||
pattern: 规则模式字符串
|
||||
|
||||
Returns:
|
||||
str: 规则类型 ('domain', 'ip', 'cidr', 'keyword')
|
||||
"""
|
||||
if not pattern:
|
||||
return 'domain'
|
||||
|
||||
pattern = pattern.strip()
|
||||
|
||||
# 检查关键词模式: *keyword* (前后都有星号,中间无点)
|
||||
if pattern.startswith('*') and pattern.endswith('*') and len(pattern) > 2:
|
||||
keyword = pattern[1:-1]
|
||||
# 关键词中不能有点(否则可能是域名模式)
|
||||
if '.' not in keyword:
|
||||
return 'keyword'
|
||||
|
||||
# 检查 CIDR(包含 /)
|
||||
if '/' in pattern:
|
||||
try:
|
||||
validate_cidr(pattern)
|
||||
return 'cidr'
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# 检查 IP(去掉通配符前缀后验证)
|
||||
clean_pattern = pattern.lstrip('*').lstrip('.')
|
||||
if is_valid_ip(clean_pattern):
|
||||
return 'ip'
|
||||
|
||||
# 默认为域名
|
||||
return 'domain'
|
||||
|
||||
|
||||
def extract_host(target: str) -> str:
|
||||
"""
|
||||
从目标字符串中提取主机名
|
||||
|
||||
支持:
|
||||
- 纯域名:example.com
|
||||
- 纯 IP:192.168.1.1
|
||||
- URL:http://example.com/path
|
||||
|
||||
Args:
|
||||
target: 目标字符串
|
||||
|
||||
Returns:
|
||||
str: 提取的主机名
|
||||
"""
|
||||
if not target:
|
||||
return ''
|
||||
|
||||
target = target.strip()
|
||||
|
||||
# 如果是 URL,提取 hostname
|
||||
if '://' in target:
|
||||
try:
|
||||
parsed = urlparse(target)
|
||||
return parsed.hostname or target
|
||||
except Exception:
|
||||
return target
|
||||
|
||||
return target
|
||||
|
||||
|
||||
class BlacklistFilter:
|
||||
"""
|
||||
黑名单过滤器
|
||||
|
||||
预编译规则,提供高效的匹配功能。
|
||||
"""
|
||||
|
||||
def __init__(self, rules: List):
|
||||
"""
|
||||
初始化过滤器
|
||||
|
||||
Args:
|
||||
rules: BlacklistRule 对象列表
|
||||
"""
|
||||
from apps.common.models import BlacklistRule
|
||||
|
||||
# 预解析:按类型分类 + CIDR 预编译
|
||||
self._domain_rules = [] # (pattern, is_wildcard, suffix)
|
||||
self._ip_rules = set() # 精确 IP 用 set,O(1) 查找
|
||||
self._cidr_rules = [] # (pattern, network_obj)
|
||||
self._keyword_rules = [] # 关键词列表(小写)
|
||||
|
||||
# 去重:跨 scope 可能有重复规则
|
||||
seen_patterns = set()
|
||||
|
||||
for rule in rules:
|
||||
if rule.pattern in seen_patterns:
|
||||
continue
|
||||
seen_patterns.add(rule.pattern)
|
||||
if rule.rule_type == BlacklistRule.RuleType.DOMAIN:
|
||||
pattern = rule.pattern.lower()
|
||||
if pattern.startswith('*.'):
|
||||
self._domain_rules.append((pattern, True, pattern[1:]))
|
||||
else:
|
||||
self._domain_rules.append((pattern, False, None))
|
||||
elif rule.rule_type == BlacklistRule.RuleType.IP:
|
||||
self._ip_rules.add(rule.pattern)
|
||||
elif rule.rule_type == BlacklistRule.RuleType.CIDR:
|
||||
try:
|
||||
network = ipaddress.ip_network(rule.pattern, strict=False)
|
||||
self._cidr_rules.append((rule.pattern, network))
|
||||
except ValueError:
|
||||
pass
|
||||
elif rule.rule_type == BlacklistRule.RuleType.KEYWORD:
|
||||
# *cdn* -> cdn
|
||||
keyword = rule.pattern[1:-1].lower()
|
||||
self._keyword_rules.append(keyword)
|
||||
|
||||
def is_allowed(self, target: str) -> bool:
|
||||
"""
|
||||
检查目标是否通过过滤
|
||||
|
||||
Args:
|
||||
target: 要检查的目标(域名/IP/URL)
|
||||
|
||||
Returns:
|
||||
bool: True 表示通过(不在黑名单),False 表示被过滤
|
||||
"""
|
||||
if not target:
|
||||
return True
|
||||
|
||||
host = extract_host(target)
|
||||
if not host:
|
||||
return True
|
||||
|
||||
# 先判断输入类型,再走对应分支
|
||||
if is_valid_ip(host):
|
||||
return self._check_ip_rules(host)
|
||||
else:
|
||||
return self._check_domain_rules(host)
|
||||
|
||||
def _check_domain_rules(self, host: str) -> bool:
|
||||
"""检查域名规则(精确匹配 + 后缀匹配 + 关键词匹配)"""
|
||||
host_lower = host.lower()
|
||||
|
||||
# 1. 域名规则(精确 + 后缀)
|
||||
for pattern, is_wildcard, suffix in self._domain_rules:
|
||||
if is_wildcard:
|
||||
if host_lower.endswith(suffix) or host_lower == pattern[2:]:
|
||||
return False
|
||||
else:
|
||||
if host_lower == pattern:
|
||||
return False
|
||||
|
||||
# 2. 关键词匹配(字符串 in 操作,O(n*m))
|
||||
for keyword in self._keyword_rules:
|
||||
if keyword in host_lower:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _check_ip_rules(self, host: str) -> bool:
|
||||
"""检查 IP 规则(精确匹配 + CIDR)"""
|
||||
# 1. IP 精确匹配(O(1))
|
||||
if host in self._ip_rules:
|
||||
return False
|
||||
|
||||
# 2. CIDR 匹配
|
||||
if self._cidr_rules:
|
||||
try:
|
||||
ip_obj = ipaddress.ip_address(host)
|
||||
for _, network in self._cidr_rules:
|
||||
if ip_obj in network:
|
||||
return False
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -5,14 +5,17 @@
|
||||
- 健康检查视图:Docker 健康检查
|
||||
- 认证相关视图:登录、登出、用户信息、修改密码
|
||||
- 系统日志视图:实时日志查看
|
||||
- 黑名单视图:全局黑名单规则管理
|
||||
"""
|
||||
|
||||
from .health_views import HealthCheckView
|
||||
from .auth_views import LoginView, LogoutView, MeView, ChangePasswordView
|
||||
from .system_log_views import SystemLogsView, SystemLogFilesView
|
||||
from .blacklist_views import GlobalBlacklistView
|
||||
|
||||
__all__ = [
|
||||
'HealthCheckView',
|
||||
'LoginView', 'LogoutView', 'MeView', 'ChangePasswordView',
|
||||
'SystemLogsView', 'SystemLogFilesView',
|
||||
'GlobalBlacklistView',
|
||||
]
|
||||
|
||||
80
backend/apps/common/views/blacklist_views.py
Normal file
80
backend/apps/common/views/blacklist_views.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""全局黑名单 API 视图"""
|
||||
import logging
|
||||
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
|
||||
from apps.common.response_helpers import success_response, error_response
|
||||
from apps.common.services import BlacklistService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GlobalBlacklistView(APIView):
|
||||
"""
|
||||
全局黑名单规则 API
|
||||
|
||||
Endpoints:
|
||||
- GET /api/blacklist/rules/ - 获取全局黑名单列表
|
||||
- PUT /api/blacklist/rules/ - 全量替换规则(文本框保存场景)
|
||||
|
||||
设计说明:
|
||||
- 使用 PUT 全量替换模式,适合"文本框每行一个规则"的前端场景
|
||||
- 用户编辑文本框 -> 点击保存 -> 后端全量替换
|
||||
|
||||
架构:MVS 模式
|
||||
- View: 参数验证、响应格式化
|
||||
- Service: 业务逻辑(BlacklistService)
|
||||
- Model: 数据持久化(BlacklistRule)
|
||||
"""
|
||||
|
||||
permission_classes = [IsAuthenticated]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.blacklist_service = BlacklistService()
|
||||
|
||||
def get(self, request):
|
||||
"""
|
||||
获取全局黑名单规则列表
|
||||
|
||||
返回格式:
|
||||
{
|
||||
"patterns": ["*.gov", "*.edu", "10.0.0.0/8"]
|
||||
}
|
||||
"""
|
||||
rules = self.blacklist_service.get_global_rules()
|
||||
patterns = list(rules.values_list('pattern', flat=True))
|
||||
return success_response(data={'patterns': patterns})
|
||||
|
||||
def put(self, request):
|
||||
"""
|
||||
全量替换全局黑名单规则
|
||||
|
||||
请求格式:
|
||||
{
|
||||
"patterns": ["*.gov", "*.edu", "10.0.0.0/8"]
|
||||
}
|
||||
|
||||
或者空数组清空所有规则:
|
||||
{
|
||||
"patterns": []
|
||||
}
|
||||
"""
|
||||
patterns = request.data.get('patterns', [])
|
||||
|
||||
# 兼容字符串输入(换行分隔)
|
||||
if isinstance(patterns, str):
|
||||
patterns = [p for p in patterns.split('\n') if p.strip()]
|
||||
|
||||
if not isinstance(patterns, list):
|
||||
return error_response(
|
||||
code='VALIDATION_ERROR',
|
||||
message='patterns 必须是数组'
|
||||
)
|
||||
|
||||
# 调用 Service 层全量替换
|
||||
result = self.blacklist_service.replace_global_rules(patterns)
|
||||
|
||||
return success_response(data=result)
|
||||
@@ -1,4 +1,4 @@
|
||||
# Generated by Django 5.2.7 on 2026-01-02 04:45
|
||||
# Generated by Django 5.2.7 on 2026-01-06 00:55
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
@@ -13,12 +13,14 @@ SCAN_TOOLS_BASE_PATH = getattr(settings, 'SCAN_TOOLS_BASE_PATH', '/usr/local/bin
|
||||
|
||||
SUBDOMAIN_DISCOVERY_COMMANDS = {
|
||||
'subfinder': {
|
||||
# 默认使用所有数据源(更全面,略慢),并始终开启递归
|
||||
# -all 使用所有数据源
|
||||
# -recursive 对支持递归的源启用递归枚举(默认开启)
|
||||
'base': "subfinder -d {domain} -all -recursive -o '{output_file}' -silent",
|
||||
# 使用所有数据源(包括付费源,只要配置了 API key)
|
||||
# -all 使用所有数据源(slow 但全面)
|
||||
# -v 显示详细输出,包括使用的数据源(调试用)
|
||||
# 注意:不要加 -recursive,它会排除不支持递归的源(如 fofa)
|
||||
'base': "subfinder -d {domain} -all -o '{output_file}' -v",
|
||||
'optional': {
|
||||
'threads': '-t {threads}', # 控制并发 goroutine 数
|
||||
'provider_config': "-pc '{provider_config}'", # Provider 配置文件路径
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ from pathlib import Path
|
||||
from typing import Callable
|
||||
from prefect import flow
|
||||
from apps.scan.tasks.port_scan import (
|
||||
export_scan_targets_task,
|
||||
export_hosts_task,
|
||||
run_and_stream_save_ports_task
|
||||
)
|
||||
from apps.scan.handlers.scan_flow_handlers import (
|
||||
@@ -157,9 +157,9 @@ def _parse_port_count(tool_config: dict) -> int:
|
||||
|
||||
|
||||
|
||||
def _export_scan_targets(target_id: int, port_scan_dir: Path) -> tuple[str, int, str]:
|
||||
def _export_hosts(target_id: int, port_scan_dir: Path) -> tuple[str, int, str]:
|
||||
"""
|
||||
导出扫描目标到文件
|
||||
导出主机列表到文件
|
||||
|
||||
根据 Target 类型自动决定导出内容:
|
||||
- DOMAIN: 从 Subdomain 表导出子域名
|
||||
@@ -171,31 +171,31 @@ def _export_scan_targets(target_id: int, port_scan_dir: Path) -> tuple[str, int,
|
||||
port_scan_dir: 端口扫描目录
|
||||
|
||||
Returns:
|
||||
tuple: (targets_file, target_count, target_type)
|
||||
tuple: (hosts_file, host_count, target_type)
|
||||
"""
|
||||
logger.info("Step 1: 导出扫描目标列表")
|
||||
logger.info("Step 1: 导出主机列表")
|
||||
|
||||
targets_file = str(port_scan_dir / 'targets.txt')
|
||||
export_result = export_scan_targets_task(
|
||||
hosts_file = str(port_scan_dir / 'hosts.txt')
|
||||
export_result = export_hosts_task(
|
||||
target_id=target_id,
|
||||
output_file=targets_file,
|
||||
output_file=hosts_file,
|
||||
batch_size=1000 # 每次读取 1000 条,优化内存占用
|
||||
)
|
||||
|
||||
target_count = export_result['total_count']
|
||||
host_count = export_result['total_count']
|
||||
target_type = export_result.get('target_type', 'unknown')
|
||||
|
||||
logger.info(
|
||||
"✓ 扫描目标导出完成 - 类型: %s, 文件: %s, 数量: %d",
|
||||
"✓ 主机列表导出完成 - 类型: %s, 文件: %s, 数量: %d",
|
||||
target_type,
|
||||
export_result['output_file'],
|
||||
target_count
|
||||
host_count
|
||||
)
|
||||
|
||||
if target_count == 0:
|
||||
logger.warning("目标下没有可扫描的地址,无法执行端口扫描")
|
||||
if host_count == 0:
|
||||
logger.warning("目标下没有可扫描的主机,无法执行端口扫描")
|
||||
|
||||
return export_result['output_file'], target_count, target_type
|
||||
return export_result['output_file'], host_count, target_type
|
||||
|
||||
|
||||
def _run_scans_sequentially(
|
||||
@@ -382,8 +382,8 @@ def port_scan_flow(
|
||||
'scan_id': int,
|
||||
'target': str,
|
||||
'scan_workspace_dir': str,
|
||||
'domains_file': str,
|
||||
'domain_count': int,
|
||||
'hosts_file': str,
|
||||
'host_count': int,
|
||||
'processed_records': int,
|
||||
'executed_tasks': list,
|
||||
'tool_stats': {
|
||||
@@ -432,22 +432,22 @@ def port_scan_flow(
|
||||
from apps.scan.utils import setup_scan_directory
|
||||
port_scan_dir = setup_scan_directory(scan_workspace_dir, 'port_scan')
|
||||
|
||||
# Step 1: 导出扫描目标列表到文件(根据 Target 类型自动决定内容)
|
||||
targets_file, target_count, target_type = _export_scan_targets(target_id, port_scan_dir)
|
||||
# Step 1: 导出主机列表到文件(根据 Target 类型自动决定内容)
|
||||
hosts_file, host_count, target_type = _export_hosts(target_id, port_scan_dir)
|
||||
|
||||
if target_count == 0:
|
||||
logger.warning("跳过端口扫描:没有目标可扫描 - Scan ID: %s", scan_id)
|
||||
user_log(scan_id, "port_scan", "Skipped: no targets to scan", "warning")
|
||||
if host_count == 0:
|
||||
logger.warning("跳过端口扫描:没有主机可扫描 - Scan ID: %s", scan_id)
|
||||
user_log(scan_id, "port_scan", "Skipped: no hosts to scan", "warning")
|
||||
return {
|
||||
'success': True,
|
||||
'scan_id': scan_id,
|
||||
'target': target_name,
|
||||
'scan_workspace_dir': scan_workspace_dir,
|
||||
'targets_file': targets_file,
|
||||
'target_count': 0,
|
||||
'hosts_file': hosts_file,
|
||||
'host_count': 0,
|
||||
'target_type': target_type,
|
||||
'processed_records': 0,
|
||||
'executed_tasks': ['export_scan_targets'],
|
||||
'executed_tasks': ['export_hosts'],
|
||||
'tool_stats': {
|
||||
'total': 0,
|
||||
'successful': 0,
|
||||
@@ -469,7 +469,7 @@ def port_scan_flow(
|
||||
logger.info("Step 3: 串行执行扫描工具")
|
||||
tool_stats, processed_records, successful_tool_names, failed_tools = _run_scans_sequentially(
|
||||
enabled_tools=enabled_tools,
|
||||
domains_file=targets_file, # 现在是 targets_file,兼容原参数名
|
||||
domains_file=hosts_file,
|
||||
port_scan_dir=port_scan_dir,
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
@@ -481,7 +481,7 @@ def port_scan_flow(
|
||||
user_log(scan_id, "port_scan", f"port_scan completed: found {processed_records} ports")
|
||||
|
||||
# 动态生成已执行的任务列表
|
||||
executed_tasks = ['export_scan_targets', 'parse_config']
|
||||
executed_tasks = ['export_hosts', 'parse_config']
|
||||
executed_tasks.extend([f'run_and_stream_save_ports ({tool})' for tool in tool_stats.keys()])
|
||||
|
||||
return {
|
||||
@@ -489,8 +489,8 @@ def port_scan_flow(
|
||||
'scan_id': scan_id,
|
||||
'target': target_name,
|
||||
'scan_workspace_dir': scan_workspace_dir,
|
||||
'targets_file': targets_file,
|
||||
'target_count': target_count,
|
||||
'hosts_file': hosts_file,
|
||||
'host_count': host_count,
|
||||
'target_type': target_type,
|
||||
'processed_records': processed_records,
|
||||
'executed_tasks': executed_tasks,
|
||||
@@ -499,8 +499,8 @@ def port_scan_flow(
|
||||
'successful': len(successful_tool_names),
|
||||
'failed': len(failed_tools),
|
||||
'successful_tools': successful_tool_names,
|
||||
'failed_tools': failed_tools, # [{'tool': 'naabu_active', 'reason': '超时'}]
|
||||
'details': tool_stats # 详细结果(保留向后兼容)
|
||||
'failed_tools': failed_tools,
|
||||
'details': tool_stats
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -78,7 +78,8 @@ def _run_scans_parallel(
|
||||
enabled_tools: dict,
|
||||
domain_name: str,
|
||||
result_dir: Path,
|
||||
scan_id: int
|
||||
scan_id: int,
|
||||
provider_config_path: str = None
|
||||
) -> tuple[list, list, list]:
|
||||
"""
|
||||
并行运行所有启用的子域名扫描工具
|
||||
@@ -88,6 +89,7 @@ def _run_scans_parallel(
|
||||
domain_name: 目标域名
|
||||
result_dir: 结果输出目录
|
||||
scan_id: 扫描任务 ID(用于记录日志)
|
||||
provider_config_path: Provider 配置文件路径(可选,用于 subfinder)
|
||||
|
||||
Returns:
|
||||
tuple: (result_files, failed_tools, successful_tool_names)
|
||||
@@ -112,13 +114,19 @@ def _run_scans_parallel(
|
||||
|
||||
# 1.2 构建完整命令(变量替换)
|
||||
try:
|
||||
command_params = {
|
||||
'domain': domain_name, # 对应 {domain}
|
||||
'output_file': output_file # 对应 {output_file}
|
||||
}
|
||||
|
||||
# 如果是 subfinder 且有 provider_config,添加到参数
|
||||
if tool_name == 'subfinder' and provider_config_path:
|
||||
command_params['provider_config'] = provider_config_path
|
||||
|
||||
command = build_scan_command(
|
||||
tool_name=tool_name,
|
||||
scan_type='subdomain_discovery',
|
||||
command_params={
|
||||
'domain': domain_name, # 对应 {domain}
|
||||
'output_file': output_file # 对应 {output_file}
|
||||
},
|
||||
command_params=command_params,
|
||||
tool_config=tool_config
|
||||
)
|
||||
except Exception as e:
|
||||
@@ -440,6 +448,19 @@ def subdomain_discovery_flow(
|
||||
failed_tools = []
|
||||
successful_tool_names = []
|
||||
|
||||
# ==================== 生成 Provider 配置文件 ====================
|
||||
# 为 subfinder 生成第三方数据源配置
|
||||
provider_config_path = None
|
||||
try:
|
||||
from apps.scan.services.subfinder_provider_config_service import SubfinderProviderConfigService
|
||||
provider_config_service = SubfinderProviderConfigService()
|
||||
provider_config_path = provider_config_service.generate(str(result_dir))
|
||||
if provider_config_path:
|
||||
logger.info(f"Provider 配置文件已生成: {provider_config_path}")
|
||||
user_log(scan_id, "subdomain_discovery", "Provider config generated for subfinder")
|
||||
except Exception as e:
|
||||
logger.warning(f"生成 Provider 配置文件失败: {e}")
|
||||
|
||||
# ==================== Stage 1: 被动收集(并行)====================
|
||||
if enabled_passive_tools:
|
||||
logger.info("=" * 40)
|
||||
@@ -451,7 +472,8 @@ def subdomain_discovery_flow(
|
||||
enabled_tools=enabled_passive_tools,
|
||||
domain_name=domain_name,
|
||||
result_dir=result_dir,
|
||||
scan_id=scan_id
|
||||
scan_id=scan_id,
|
||||
provider_config_path=provider_config_path
|
||||
)
|
||||
all_result_files.extend(result_files)
|
||||
failed_tools.extend(stage1_failed)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Generated by Django 5.2.7 on 2026-01-02 04:45
|
||||
# Generated by Django 5.2.7 on 2026-01-06 00:55
|
||||
|
||||
import django.contrib.postgres.fields
|
||||
import django.db.models.deletion
|
||||
@@ -31,6 +31,20 @@ class Migration(migrations.Migration):
|
||||
'db_table': 'notification_settings',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='SubfinderProviderSettings',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('providers', models.JSONField(default=dict, help_text='各 Provider 的 API Key 配置')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Subfinder Provider 配置',
|
||||
'verbose_name_plural': 'Subfinder Provider 配置',
|
||||
'db_table': 'subfinder_provider_settings',
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Notification',
|
||||
fields=[
|
||||
@@ -87,7 +101,22 @@ class Migration(migrations.Migration):
|
||||
'verbose_name_plural': '扫描任务',
|
||||
'db_table': 'scan',
|
||||
'ordering': ['-created_at'],
|
||||
'indexes': [models.Index(fields=['-created_at'], name='scan_created_0bb6c7_idx'), models.Index(fields=['target'], name='scan_target__718b9d_idx'), models.Index(fields=['deleted_at', '-created_at'], name='scan_deleted_eb17e8_idx')],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ScanLog',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('level', models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error')], default='info', help_text='日志级别', max_length=10)),
|
||||
('content', models.TextField(help_text='日志内容')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True, db_index=True, help_text='创建时间')),
|
||||
('scan', models.ForeignKey(help_text='关联的扫描任务', on_delete=django.db.models.deletion.CASCADE, related_name='logs', to='scan.scan')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': '扫描日志',
|
||||
'verbose_name_plural': '扫描日志',
|
||||
'db_table': 'scan_log',
|
||||
'ordering': ['created_at'],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
@@ -113,24 +142,34 @@ class Migration(migrations.Migration):
|
||||
'verbose_name_plural': '定时扫描任务',
|
||||
'db_table': 'scheduled_scan',
|
||||
'ordering': ['-created_at'],
|
||||
'indexes': [models.Index(fields=['-created_at'], name='scheduled_s_created_9b9c2e_idx'), models.Index(fields=['is_enabled', '-created_at'], name='scheduled_s_is_enab_23d660_idx'), models.Index(fields=['name'], name='scheduled_s_name_bf332d_idx')],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ScanLog',
|
||||
fields=[
|
||||
('id', models.BigAutoField(primary_key=True, serialize=False)),
|
||||
('level', models.CharField(choices=[('info', 'Info'), ('warning', 'Warning'), ('error', 'Error')], default='info', help_text='日志级别', max_length=10)),
|
||||
('content', models.TextField(help_text='日志内容')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True, db_index=True, help_text='创建时间')),
|
||||
('scan', models.ForeignKey(db_index=True, help_text='关联的扫描任务', on_delete=django.db.models.deletion.CASCADE, related_name='logs', to='scan.scan')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': '扫描日志',
|
||||
'verbose_name_plural': '扫描日志',
|
||||
'db_table': 'scan_log',
|
||||
'ordering': ['created_at'],
|
||||
'indexes': [models.Index(fields=['scan', 'created_at'], name='scan_log_scan_id_e8c8f5_idx')],
|
||||
},
|
||||
migrations.AddIndex(
|
||||
model_name='scan',
|
||||
index=models.Index(fields=['-created_at'], name='scan_created_0bb6c7_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scan',
|
||||
index=models.Index(fields=['target'], name='scan_target__718b9d_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scan',
|
||||
index=models.Index(fields=['deleted_at', '-created_at'], name='scan_deleted_eb17e8_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scanlog',
|
||||
index=models.Index(fields=['scan', 'created_at'], name='scan_log_scan_id_c4814a_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scheduledscan',
|
||||
index=models.Index(fields=['-created_at'], name='scheduled_s_created_9b9c2e_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scheduledscan',
|
||||
index=models.Index(fields=['is_enabled', '-created_at'], name='scheduled_s_is_enab_23d660_idx'),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name='scheduledscan',
|
||||
index=models.Index(fields=['name'], name='scheduled_s_name_bf332d_idx'),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,244 +0,0 @@
|
||||
from django.db import models
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
|
||||
from ..common.definitions import ScanStatus
|
||||
|
||||
|
||||
|
||||
|
||||
class SoftDeleteManager(models.Manager):
|
||||
"""软删除管理器:默认只返回未删除的记录"""
|
||||
|
||||
def get_queryset(self):
|
||||
return super().get_queryset().filter(deleted_at__isnull=True)
|
||||
|
||||
|
||||
class Scan(models.Model):
|
||||
"""扫描任务模型"""
|
||||
|
||||
id = models.AutoField(primary_key=True)
|
||||
|
||||
target = models.ForeignKey('targets.Target', on_delete=models.CASCADE, related_name='scans', help_text='扫描目标')
|
||||
|
||||
# 多引擎支持字段
|
||||
engine_ids = ArrayField(
|
||||
models.IntegerField(),
|
||||
default=list,
|
||||
help_text='引擎 ID 列表'
|
||||
)
|
||||
engine_names = models.JSONField(
|
||||
default=list,
|
||||
help_text='引擎名称列表,如 ["引擎A", "引擎B"]'
|
||||
)
|
||||
yaml_configuration = models.TextField(
|
||||
default='',
|
||||
help_text='YAML 格式的扫描配置'
|
||||
)
|
||||
|
||||
created_at = models.DateTimeField(auto_now_add=True, help_text='任务创建时间')
|
||||
stopped_at = models.DateTimeField(null=True, blank=True, help_text='扫描结束时间')
|
||||
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=ScanStatus.choices,
|
||||
default=ScanStatus.INITIATED,
|
||||
db_index=True,
|
||||
help_text='任务状态'
|
||||
)
|
||||
|
||||
results_dir = models.CharField(max_length=100, blank=True, default='', help_text='结果存储目录')
|
||||
|
||||
container_ids = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
default=list,
|
||||
help_text='容器 ID 列表(Docker Container ID)'
|
||||
)
|
||||
|
||||
worker = models.ForeignKey(
|
||||
'engine.WorkerNode',
|
||||
on_delete=models.SET_NULL,
|
||||
related_name='scans',
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text='执行扫描的 Worker 节点'
|
||||
)
|
||||
|
||||
error_message = models.CharField(max_length=2000, blank=True, default='', help_text='错误信息')
|
||||
|
||||
# ==================== 软删除字段 ====================
|
||||
deleted_at = models.DateTimeField(null=True, blank=True, db_index=True, help_text='删除时间(NULL表示未删除)')
|
||||
|
||||
# ==================== 管理器 ====================
|
||||
objects = SoftDeleteManager() # 默认管理器:只返回未删除的记录
|
||||
all_objects = models.Manager() # 全量管理器:包括已删除的记录(用于硬删除)
|
||||
|
||||
# ==================== 进度跟踪字段 ====================
|
||||
progress = models.IntegerField(default=0, help_text='扫描进度 0-100')
|
||||
current_stage = models.CharField(max_length=50, blank=True, default='', help_text='当前扫描阶段')
|
||||
stage_progress = models.JSONField(default=dict, help_text='各阶段进度详情')
|
||||
|
||||
# ==================== 缓存统计字段 ====================
|
||||
cached_subdomains_count = models.IntegerField(default=0, help_text='缓存的子域名数量')
|
||||
cached_websites_count = models.IntegerField(default=0, help_text='缓存的网站数量')
|
||||
cached_endpoints_count = models.IntegerField(default=0, help_text='缓存的端点数量')
|
||||
cached_ips_count = models.IntegerField(default=0, help_text='缓存的IP地址数量')
|
||||
cached_directories_count = models.IntegerField(default=0, help_text='缓存的目录数量')
|
||||
cached_vulns_total = models.IntegerField(default=0, help_text='缓存的漏洞总数')
|
||||
cached_vulns_critical = models.IntegerField(default=0, help_text='缓存的严重漏洞数量')
|
||||
cached_vulns_high = models.IntegerField(default=0, help_text='缓存的高危漏洞数量')
|
||||
cached_vulns_medium = models.IntegerField(default=0, help_text='缓存的中危漏洞数量')
|
||||
cached_vulns_low = models.IntegerField(default=0, help_text='缓存的低危漏洞数量')
|
||||
stats_updated_at = models.DateTimeField(null=True, blank=True, help_text='统计数据最后更新时间')
|
||||
|
||||
class Meta:
|
||||
db_table = 'scan'
|
||||
verbose_name = '扫描任务'
|
||||
verbose_name_plural = '扫描任务'
|
||||
ordering = ['-created_at']
|
||||
indexes = [
|
||||
models.Index(fields=['-created_at']), # 优化按创建时间降序排序(list 查询的默认排序)
|
||||
models.Index(fields=['target']), # 优化按目标查询扫描任务
|
||||
models.Index(fields=['deleted_at', '-created_at']), # 软删除 + 时间索引
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"Scan #{self.id} - {self.target.name}"
|
||||
|
||||
|
||||
class ScanLog(models.Model):
|
||||
"""扫描日志模型
|
||||
|
||||
存储扫描过程中的关键处理日志,用于前端实时查看扫描进度。
|
||||
|
||||
日志类型:
|
||||
- 阶段开始/完成/失败
|
||||
- 处理进度(如 "Progress: 50/120")
|
||||
- 发现结果统计(如 "Found 120 subdomains")
|
||||
- 错误信息
|
||||
|
||||
日志格式:[stage_name] message
|
||||
"""
|
||||
|
||||
class Level(models.TextChoices):
|
||||
INFO = 'info', 'Info'
|
||||
WARNING = 'warning', 'Warning'
|
||||
ERROR = 'error', 'Error'
|
||||
|
||||
id = models.BigAutoField(primary_key=True)
|
||||
scan = models.ForeignKey(
|
||||
'Scan',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='logs',
|
||||
db_index=True,
|
||||
help_text='关联的扫描任务'
|
||||
)
|
||||
level = models.CharField(
|
||||
max_length=10,
|
||||
choices=Level.choices,
|
||||
default=Level.INFO,
|
||||
help_text='日志级别'
|
||||
)
|
||||
content = models.TextField(help_text='日志内容')
|
||||
created_at = models.DateTimeField(auto_now_add=True, db_index=True, help_text='创建时间')
|
||||
|
||||
class Meta:
|
||||
db_table = 'scan_log'
|
||||
verbose_name = '扫描日志'
|
||||
verbose_name_plural = '扫描日志'
|
||||
ordering = ['created_at']
|
||||
indexes = [
|
||||
models.Index(fields=['scan', 'created_at']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"[{self.level}] {self.content[:50]}"
|
||||
|
||||
|
||||
class ScheduledScan(models.Model):
|
||||
"""
|
||||
定时扫描任务模型
|
||||
|
||||
调度机制:
|
||||
- APScheduler 每分钟检查 next_run_time
|
||||
- 到期任务通过 task_distributor 分发到 Worker 执行
|
||||
- 支持 cron 表达式进行灵活调度
|
||||
|
||||
扫描模式(二选一):
|
||||
- 组织扫描:设置 organization,执行时动态获取组织下所有目标
|
||||
- 目标扫描:设置 target,扫描单个目标
|
||||
- organization 优先级高于 target
|
||||
"""
|
||||
|
||||
id = models.AutoField(primary_key=True)
|
||||
|
||||
# 基本信息
|
||||
name = models.CharField(max_length=200, help_text='任务名称')
|
||||
|
||||
# 多引擎支持字段
|
||||
engine_ids = ArrayField(
|
||||
models.IntegerField(),
|
||||
default=list,
|
||||
help_text='引擎 ID 列表'
|
||||
)
|
||||
engine_names = models.JSONField(
|
||||
default=list,
|
||||
help_text='引擎名称列表,如 ["引擎A", "引擎B"]'
|
||||
)
|
||||
yaml_configuration = models.TextField(
|
||||
default='',
|
||||
help_text='YAML 格式的扫描配置'
|
||||
)
|
||||
|
||||
# 关联的组织(组织扫描模式:执行时动态获取组织下所有目标)
|
||||
organization = models.ForeignKey(
|
||||
'targets.Organization',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='scheduled_scans',
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text='扫描组织(设置后执行时动态获取组织下所有目标)'
|
||||
)
|
||||
|
||||
# 关联的目标(目标扫描模式:扫描单个目标)
|
||||
target = models.ForeignKey(
|
||||
'targets.Target',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='scheduled_scans',
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text='扫描单个目标(与 organization 二选一)'
|
||||
)
|
||||
|
||||
# 调度配置 - 直接使用 Cron 表达式
|
||||
cron_expression = models.CharField(
|
||||
max_length=100,
|
||||
default='0 2 * * *',
|
||||
help_text='Cron 表达式,格式:分 时 日 月 周'
|
||||
)
|
||||
|
||||
# 状态
|
||||
is_enabled = models.BooleanField(default=True, db_index=True, help_text='是否启用')
|
||||
|
||||
# 执行统计
|
||||
run_count = models.IntegerField(default=0, help_text='已执行次数')
|
||||
last_run_time = models.DateTimeField(null=True, blank=True, help_text='上次执行时间')
|
||||
next_run_time = models.DateTimeField(null=True, blank=True, help_text='下次执行时间')
|
||||
|
||||
# 时间戳
|
||||
created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
|
||||
updated_at = models.DateTimeField(auto_now=True, help_text='更新时间')
|
||||
|
||||
class Meta:
|
||||
db_table = 'scheduled_scan'
|
||||
verbose_name = '定时扫描任务'
|
||||
verbose_name_plural = '定时扫描任务'
|
||||
ordering = ['-created_at']
|
||||
indexes = [
|
||||
models.Index(fields=['-created_at']),
|
||||
models.Index(fields=['is_enabled', '-created_at']),
|
||||
models.Index(fields=['name']), # 优化 name 搜索
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"ScheduledScan #{self.id} - {self.name}"
|
||||
18
backend/apps/scan/models/__init__.py
Normal file
18
backend/apps/scan/models/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Scan Models - 统一导出"""
|
||||
|
||||
from .scan_models import Scan, SoftDeleteManager
|
||||
from .scan_log_model import ScanLog
|
||||
from .scheduled_scan_model import ScheduledScan
|
||||
from .subfinder_provider_settings_model import SubfinderProviderSettings
|
||||
|
||||
# 兼容旧名称(已废弃,请使用 SubfinderProviderSettings)
|
||||
ProviderSettings = SubfinderProviderSettings
|
||||
|
||||
__all__ = [
|
||||
'Scan',
|
||||
'ScanLog',
|
||||
'ScheduledScan',
|
||||
'SoftDeleteManager',
|
||||
'SubfinderProviderSettings',
|
||||
'ProviderSettings', # 兼容旧名称
|
||||
]
|
||||
41
backend/apps/scan/models/scan_log_model.py
Normal file
41
backend/apps/scan/models/scan_log_model.py
Normal file
@@ -0,0 +1,41 @@
|
"""扫描日志模型"""

from django.db import models


class ScanLog(models.Model):
    """扫描日志模型"""

    class Level(models.TextChoices):
        INFO = 'info', 'Info'
        WARNING = 'warning', 'Warning'
        ERROR = 'error', 'Error'

    id = models.BigAutoField(primary_key=True)
    scan = models.ForeignKey(
        'Scan',
        on_delete=models.CASCADE,
        related_name='logs',
        db_index=True,
        help_text='关联的扫描任务'
    )
    level = models.CharField(
        max_length=10,
        choices=Level.choices,
        default=Level.INFO,
        help_text='日志级别'
    )
    content = models.TextField(help_text='日志内容')
    created_at = models.DateTimeField(auto_now_add=True, db_index=True, help_text='创建时间')

    class Meta:
        db_table = 'scan_log'
        verbose_name = '扫描日志'
        verbose_name_plural = '扫描日志'
        ordering = ['created_at']
        indexes = [
            models.Index(fields=['scan', 'created_at']),
        ]

    def __str__(self):
        return f"[{self.level}] {self.content[:50]}"

106  backend/apps/scan/models/scan_models.py  Normal file
@@ -0,0 +1,106 @@
"""扫描相关模型"""

from django.db import models
from django.contrib.postgres.fields import ArrayField

from apps.common.definitions import ScanStatus


class SoftDeleteManager(models.Manager):
    """软删除管理器:默认只返回未删除的记录"""

    def get_queryset(self):
        return super().get_queryset().filter(deleted_at__isnull=True)


class Scan(models.Model):
    """扫描任务模型"""

    id = models.AutoField(primary_key=True)

    target = models.ForeignKey('targets.Target', on_delete=models.CASCADE, related_name='scans', help_text='扫描目标')

    # 多引擎支持字段
    engine_ids = ArrayField(
        models.IntegerField(),
        default=list,
        help_text='引擎 ID 列表'
    )
    engine_names = models.JSONField(
        default=list,
        help_text='引擎名称列表,如 ["引擎A", "引擎B"]'
    )
    yaml_configuration = models.TextField(
        default='',
        help_text='YAML 格式的扫描配置'
    )

    created_at = models.DateTimeField(auto_now_add=True, help_text='任务创建时间')
    stopped_at = models.DateTimeField(null=True, blank=True, help_text='扫描结束时间')

    status = models.CharField(
        max_length=20,
        choices=ScanStatus.choices,
        default=ScanStatus.INITIATED,
        db_index=True,
        help_text='任务状态'
    )

    results_dir = models.CharField(max_length=100, blank=True, default='', help_text='结果存储目录')

    container_ids = ArrayField(
        models.CharField(max_length=100),
        blank=True,
        default=list,
        help_text='容器 ID 列表(Docker Container ID)'
    )

    worker = models.ForeignKey(
        'engine.WorkerNode',
        on_delete=models.SET_NULL,
        related_name='scans',
        null=True,
        blank=True,
        help_text='执行扫描的 Worker 节点'
    )

    error_message = models.CharField(max_length=2000, blank=True, default='', help_text='错误信息')

    # ==================== 软删除字段 ====================
    deleted_at = models.DateTimeField(null=True, blank=True, db_index=True, help_text='删除时间(NULL表示未删除)')

    # ==================== 管理器 ====================
    objects = SoftDeleteManager()  # 默认管理器:只返回未删除的记录
    all_objects = models.Manager()  # 全量管理器:包括已删除的记录(用于硬删除)

    # ==================== 进度跟踪字段 ====================
    progress = models.IntegerField(default=0, help_text='扫描进度 0-100')
    current_stage = models.CharField(max_length=50, blank=True, default='', help_text='当前扫描阶段')
    stage_progress = models.JSONField(default=dict, help_text='各阶段进度详情')

    # ==================== 缓存统计字段 ====================
    cached_subdomains_count = models.IntegerField(default=0, help_text='缓存的子域名数量')
    cached_websites_count = models.IntegerField(default=0, help_text='缓存的网站数量')
    cached_endpoints_count = models.IntegerField(default=0, help_text='缓存的端点数量')
    cached_ips_count = models.IntegerField(default=0, help_text='缓存的IP地址数量')
    cached_directories_count = models.IntegerField(default=0, help_text='缓存的目录数量')
    cached_vulns_total = models.IntegerField(default=0, help_text='缓存的漏洞总数')
    cached_vulns_critical = models.IntegerField(default=0, help_text='缓存的严重漏洞数量')
    cached_vulns_high = models.IntegerField(default=0, help_text='缓存的高危漏洞数量')
    cached_vulns_medium = models.IntegerField(default=0, help_text='缓存的中危漏洞数量')
    cached_vulns_low = models.IntegerField(default=0, help_text='缓存的低危漏洞数量')
    stats_updated_at = models.DateTimeField(null=True, blank=True, help_text='统计数据最后更新时间')

    class Meta:
        db_table = 'scan'
        verbose_name = '扫描任务'
        verbose_name_plural = '扫描任务'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['-created_at']),
            models.Index(fields=['target']),
            models.Index(fields=['deleted_at', '-created_at']),
        ]

    def __str__(self):
        return f"Scan #{self.id} - {self.target.name}"

73  backend/apps/scan/models/scheduled_scan_model.py  Normal file
@@ -0,0 +1,73 @@
"""定时扫描任务模型"""

from django.db import models
from django.contrib.postgres.fields import ArrayField


class ScheduledScan(models.Model):
    """定时扫描任务模型"""

    id = models.AutoField(primary_key=True)

    name = models.CharField(max_length=200, help_text='任务名称')

    engine_ids = ArrayField(
        models.IntegerField(),
        default=list,
        help_text='引擎 ID 列表'
    )
    engine_names = models.JSONField(
        default=list,
        help_text='引擎名称列表,如 ["引擎A", "引擎B"]'
    )
    yaml_configuration = models.TextField(
        default='',
        help_text='YAML 格式的扫描配置'
    )

    organization = models.ForeignKey(
        'targets.Organization',
        on_delete=models.CASCADE,
        related_name='scheduled_scans',
        null=True,
        blank=True,
        help_text='扫描组织(设置后执行时动态获取组织下所有目标)'
    )

    target = models.ForeignKey(
        'targets.Target',
        on_delete=models.CASCADE,
        related_name='scheduled_scans',
        null=True,
        blank=True,
        help_text='扫描单个目标(与 organization 二选一)'
    )

    cron_expression = models.CharField(
        max_length=100,
        default='0 2 * * *',
        help_text='Cron 表达式,格式:分 时 日 月 周'
    )

    is_enabled = models.BooleanField(default=True, db_index=True, help_text='是否启用')

    run_count = models.IntegerField(default=0, help_text='已执行次数')
    last_run_time = models.DateTimeField(null=True, blank=True, help_text='上次执行时间')
    next_run_time = models.DateTimeField(null=True, blank=True, help_text='下次执行时间')

    created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
    updated_at = models.DateTimeField(auto_now=True, help_text='更新时间')

    class Meta:
        db_table = 'scheduled_scan'
        verbose_name = '定时扫描任务'
        verbose_name_plural = '定时扫描任务'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['-created_at']),
            models.Index(fields=['is_enabled', '-created_at']),
            models.Index(fields=['name']),
        ]

    def __str__(self):
        return f"ScheduledScan #{self.id} - {self.name}"
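A minimal sketch of how `cron_expression` (格式:分 时 日 月 周) can be turned into a `next_run_time` value. This assumes the `croniter` package; the actual scheduler wiring is not part of this diff.

```python
from datetime import datetime, timezone
from typing import Optional

from croniter import croniter  # assumption: scheduler implementation is not shown in this diff


def compute_next_run(cron_expression: str, base: Optional[datetime] = None) -> datetime:
    """Derive the next fire time from a 'minute hour day month weekday' expression."""
    base = base or datetime.now(timezone.utc)
    return croniter(cron_expression, base).get_next(datetime)


# The default '0 2 * * *' fires every day at 02:00
# next_run_time = compute_next_run('0 2 * * *')
```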
@@ -0,0 +1,64 @@
"""Subfinder Provider 配置模型(单例模式)

用于存储 subfinder 第三方数据源的 API Key 配置
"""

from django.db import models


class SubfinderProviderSettings(models.Model):
    """
    Subfinder Provider 配置(单例模式)
    存储第三方数据源的 API Key 配置,用于 subfinder 子域名发现

    支持的 Provider:
    - fofa: email + api_key (composite)
    - censys: api_id + api_secret (composite)
    - hunter, shodan, zoomeye, securitytrails, threatbook, quake: api_key (single)
    """

    providers = models.JSONField(
        default=dict,
        help_text='各 Provider 的 API Key 配置'
    )

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'subfinder_provider_settings'
        verbose_name = 'Subfinder Provider 配置'
        verbose_name_plural = 'Subfinder Provider 配置'

    DEFAULT_PROVIDERS = {
        'fofa': {'enabled': False, 'email': '', 'api_key': ''},
        'hunter': {'enabled': False, 'api_key': ''},
        'shodan': {'enabled': False, 'api_key': ''},
        'censys': {'enabled': False, 'api_id': '', 'api_secret': ''},
        'zoomeye': {'enabled': False, 'api_key': ''},
        'securitytrails': {'enabled': False, 'api_key': ''},
        'threatbook': {'enabled': False, 'api_key': ''},
        'quake': {'enabled': False, 'api_key': ''},
    }

    def save(self, *args, **kwargs):
        self.pk = 1
        super().save(*args, **kwargs)

    @classmethod
    def get_instance(cls) -> 'SubfinderProviderSettings':
        """获取或创建单例实例"""
        obj, _ = cls.objects.get_or_create(
            pk=1,
            defaults={'providers': cls.DEFAULT_PROVIDERS.copy()}
        )
        return obj

    def get_provider_config(self, provider: str) -> dict:
        """获取指定 Provider 的配置"""
        return self.providers.get(provider, self.DEFAULT_PROVIDERS.get(provider, {}))

    def is_provider_enabled(self, provider: str) -> bool:
        """检查指定 Provider 是否启用"""
        config = self.get_provider_config(provider)
        return config.get('enabled', False)
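A short usage sketch of the singleton accessor defined above; the key value is illustrative, not taken from this diff.

```python
from apps.scan.models import SubfinderProviderSettings

settings = SubfinderProviderSettings.get_instance()  # always operates on the pk=1 row

# enable shodan and store its API key (value is a placeholder)
settings.providers['shodan'] = {'enabled': True, 'api_key': 'YOUR_KEY_HERE'}
settings.save()

if settings.is_provider_enabled('shodan'):
    api_key = settings.get_provider_config('shodan')['api_key']
```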
@@ -1,411 +0,0 @@
from rest_framework import serializers
from django.db.models import Count
import yaml

from .models import Scan, ScheduledScan, ScanLog


# ==================== 扫描日志序列化器 ====================

class ScanLogSerializer(serializers.ModelSerializer):
    """扫描日志序列化器"""

    class Meta:
        model = ScanLog
        fields = ['id', 'level', 'content', 'created_at']


# ==================== 通用验证 Mixin ====================

class DuplicateKeyLoader(yaml.SafeLoader):
    """自定义 YAML Loader,检测重复 key"""
    pass


def _check_duplicate_keys(loader, node, deep=False):
    """检测 YAML mapping 中的重复 key"""
    mapping = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        if key in mapping:
            raise yaml.constructor.ConstructorError(
                "while constructing a mapping", node.start_mark,
                f"发现重复的配置项 '{key}',后面的配置会覆盖前面的配置,请删除重复项", key_node.start_mark
            )
        mapping[key] = loader.construct_object(value_node, deep=deep)
    return mapping


DuplicateKeyLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    _check_duplicate_keys
)


class ScanConfigValidationMixin:
    """扫描配置验证 Mixin,提供通用的验证方法"""

    def validate_configuration(self, value):
        """验证 YAML 配置格式,包括检测重复 key"""
        import yaml

        if not value or not value.strip():
            raise serializers.ValidationError("configuration 不能为空")

        try:
            # 使用自定义 Loader 检测重复 key
            yaml.load(value, Loader=DuplicateKeyLoader)
        except yaml.YAMLError as e:
            raise serializers.ValidationError(f"无效的 YAML 格式: {str(e)}")

        return value

    def validate_engine_ids(self, value):
        """验证引擎 ID 列表"""
        if not value:
            raise serializers.ValidationError("engine_ids 不能为空,请至少选择一个扫描引擎")
        return value

    def validate_engine_names(self, value):
        """验证引擎名称列表"""
        if not value:
            raise serializers.ValidationError("engine_names 不能为空")
        return value


# ==================== 扫描任务序列化器 ====================


class ScanSerializer(serializers.ModelSerializer):
    """扫描任务序列化器"""
    target_name = serializers.SerializerMethodField()

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine_ids', 'engine_names',
            'created_at', 'stopped_at', 'status', 'results_dir',
            'container_ids', 'error_message'
        ]
        read_only_fields = [
            'id', 'created_at', 'stopped_at', 'results_dir',
            'container_ids', 'error_message', 'status'
        ]

    def get_target_name(self, obj):
        """获取目标名称"""
        return obj.target.name if obj.target else None


class ScanHistorySerializer(serializers.ModelSerializer):
    """扫描历史列表专用序列化器

    为前端扫描历史页面提供优化的数据格式,包括:
    - 扫描汇总统计(子域名、端点、漏洞数量)
    - 进度百分比和当前阶段
    - 执行节点信息
    """

    # 字段映射
    target_name = serializers.CharField(source='target.name', read_only=True)
    worker_name = serializers.CharField(source='worker.name', read_only=True, allow_null=True)

    # 计算字段
    summary = serializers.SerializerMethodField()

    # 进度跟踪字段(直接从模型读取)
    progress = serializers.IntegerField(read_only=True)
    current_stage = serializers.CharField(read_only=True)
    stage_progress = serializers.JSONField(read_only=True)

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine_ids', 'engine_names',
            'worker_name', 'created_at', 'status', 'error_message', 'summary',
            'progress', 'current_stage', 'stage_progress'
        ]

    def get_summary(self, obj):
        """获取扫描汇总数据。

        设计原则:
        - 子域名/网站/端点/IP/目录使用缓存字段(避免实时 COUNT)
        - 漏洞统计使用 Scan 上的缓存字段,在扫描结束时统一聚合
        """
        # 1. 使用缓存字段构建基础统计(子域名、网站、端点、IP、目录)
        summary = {
            'subdomains': obj.cached_subdomains_count or 0,
            'websites': obj.cached_websites_count or 0,
            'endpoints': obj.cached_endpoints_count or 0,
            'ips': obj.cached_ips_count or 0,
            'directories': obj.cached_directories_count or 0,
        }

        # 2. 使用 Scan 模型上的缓存漏洞统计(按严重性聚合)
        summary['vulnerabilities'] = {
            'total': obj.cached_vulns_total or 0,
            'critical': obj.cached_vulns_critical or 0,
            'high': obj.cached_vulns_high or 0,
            'medium': obj.cached_vulns_medium or 0,
            'low': obj.cached_vulns_low or 0,
        }

        return summary


class QuickScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """
    快速扫描序列化器

    功能:
    - 接收目标列表和 YAML 配置
    - 自动创建/获取目标
    - 立即发起扫描
    """

    # 批量创建的最大数量限制
    MAX_BATCH_SIZE = 1000

    # 目标列表
    targets = serializers.ListField(
        child=serializers.DictField(),
        help_text='目标列表,每个目标包含 name 字段'
    )

    # YAML 配置(必填)
    configuration = serializers.CharField(
        required=True,
        help_text='YAML 格式的扫描配置(必填)'
    )

    # 扫描引擎 ID 列表(必填,用于记录和显示)
    engine_ids = serializers.ListField(
        child=serializers.IntegerField(),
        required=True,
        help_text='使用的扫描引擎 ID 列表(必填)'
    )

    # 引擎名称列表(必填,用于记录和显示)
    engine_names = serializers.ListField(
        child=serializers.CharField(),
        required=True,
        help_text='引擎名称列表(必填)'
    )

    def validate_targets(self, value):
        """验证目标列表"""
        if not value:
            raise serializers.ValidationError("目标列表不能为空")

        # 检查数量限制,防止服务器过载
        if len(value) > self.MAX_BATCH_SIZE:
            raise serializers.ValidationError(
                f"快速扫描最多支持 {self.MAX_BATCH_SIZE} 个目标,当前提交了 {len(value)} 个"
            )

        # 验证每个目标的必填字段
        for idx, target in enumerate(value):
            if 'name' not in target:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标缺少 name 字段")
            if not target['name']:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标的 name 不能为空")

        return value


# ==================== 定时扫描序列化器 ====================

class ScheduledScanSerializer(serializers.ModelSerializer):
    """定时扫描任务序列化器(用于列表和详情)"""

    # 关联字段
    organization_id = serializers.IntegerField(source='organization.id', read_only=True, allow_null=True)
    organization_name = serializers.CharField(source='organization.name', read_only=True, allow_null=True)
    target_id = serializers.IntegerField(source='target.id', read_only=True, allow_null=True)
    target_name = serializers.CharField(source='target.name', read_only=True, allow_null=True)
    scan_mode = serializers.SerializerMethodField()

    class Meta:
        model = ScheduledScan
        fields = [
            'id', 'name',
            'engine_ids', 'engine_names',
            'organization_id', 'organization_name',
            'target_id', 'target_name',
            'scan_mode',
            'cron_expression',
            'is_enabled',
            'run_count', 'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]
        read_only_fields = [
            'id', 'run_count',
            'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]

    def get_scan_mode(self, obj):
        """获取扫描模式:organization 或 target"""
        return 'organization' if obj.organization_id else 'target'


class CreateScheduledScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """创建定时扫描任务序列化器

    扫描模式(二选一):
    - 组织扫描:提供 organization_id,执行时动态获取组织下所有目标
    - 目标扫描:提供 target_id,扫描单个目标
    """

    name = serializers.CharField(max_length=200, help_text='任务名称')

    # YAML 配置(必填)
    configuration = serializers.CharField(
        required=True,
        help_text='YAML 格式的扫描配置(必填)'
    )

    # 扫描引擎 ID 列表(必填,用于记录和显示)
    engine_ids = serializers.ListField(
        child=serializers.IntegerField(),
        required=True,
        help_text='扫描引擎 ID 列表(必填)'
    )

    # 引擎名称列表(必填,用于记录和显示)
    engine_names = serializers.ListField(
        child=serializers.CharField(),
        required=True,
        help_text='引擎名称列表(必填)'
    )

    # 组织扫描模式
    organization_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='组织 ID(组织扫描模式:执行时动态获取组织下所有目标)'
    )

    # 目标扫描模式
    target_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='目标 ID(目标扫描模式:扫描单个目标)'
    )

    cron_expression = serializers.CharField(
        max_length=100,
        default='0 2 * * *',
        help_text='Cron 表达式,格式:分 时 日 月 周'
    )
    is_enabled = serializers.BooleanField(default=True, help_text='是否立即启用')

    def validate(self, data):
        """验证 organization_id 和 target_id 互斥"""
        organization_id = data.get('organization_id')
        target_id = data.get('target_id')

        if not organization_id and not target_id:
            raise serializers.ValidationError('必须提供 organization_id 或 target_id 其中之一')

        if organization_id and target_id:
            raise serializers.ValidationError('organization_id 和 target_id 只能提供其中之一')

        return data


class InitiateScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """发起扫描任务序列化器

    扫描模式(二选一):
    - 组织扫描:提供 organization_id,扫描组织下所有目标
    - 目标扫描:提供 target_id,扫描单个目标
    """

    # YAML 配置(必填)
    configuration = serializers.CharField(
        required=True,
        help_text='YAML 格式的扫描配置(必填)'
    )

    # 扫描引擎 ID 列表(必填)
    engine_ids = serializers.ListField(
        child=serializers.IntegerField(),
        required=True,
        help_text='扫描引擎 ID 列表(必填)'
    )

    # 引擎名称列表(必填)
    engine_names = serializers.ListField(
        child=serializers.CharField(),
        required=True,
        help_text='引擎名称列表(必填)'
    )

    # 组织扫描模式
    organization_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='组织 ID(组织扫描模式)'
    )

    # 目标扫描模式
    target_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='目标 ID(目标扫描模式)'
    )

    def validate(self, data):
        """验证 organization_id 和 target_id 互斥"""
        organization_id = data.get('organization_id')
        target_id = data.get('target_id')

        if not organization_id and not target_id:
            raise serializers.ValidationError('必须提供 organization_id 或 target_id 其中之一')

        if organization_id and target_id:
            raise serializers.ValidationError('organization_id 和 target_id 只能提供其中之一')

        return data


class UpdateScheduledScanSerializer(serializers.Serializer):
    """更新定时扫描任务序列化器"""

    name = serializers.CharField(max_length=200, required=False, help_text='任务名称')
    engine_ids = serializers.ListField(
        child=serializers.IntegerField(),
        required=False,
        help_text='扫描引擎 ID 列表'
    )

    # 组织扫描模式
    organization_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='组织 ID(设置后清空 target_id)'
    )

    # 目标扫描模式
    target_id = serializers.IntegerField(
        required=False,
        allow_null=True,
        help_text='目标 ID(设置后清空 organization_id)'
    )

    cron_expression = serializers.CharField(max_length=100, required=False, help_text='Cron 表达式')
    is_enabled = serializers.BooleanField(required=False, help_text='是否启用')

    def validate_engine_ids(self, value):
        """验证引擎 ID 列表"""
        if value is not None and not value:
            raise serializers.ValidationError("engine_ids 不能为空")
        return value


class ToggleScheduledScanSerializer(serializers.Serializer):
    """切换定时扫描启用状态序列化器"""

    is_enabled = serializers.BooleanField(help_text='是否启用')

40  backend/apps/scan/serializers/__init__.py  Normal file
@@ -0,0 +1,40 @@
"""Scan Serializers - 统一导出"""

from .mixins import ScanConfigValidationMixin
from .scan_serializers import (
    ScanSerializer,
    ScanHistorySerializer,
    QuickScanSerializer,
    InitiateScanSerializer,
)
from .scan_log_serializers import ScanLogSerializer
from .scheduled_scan_serializers import (
    ScheduledScanSerializer,
    CreateScheduledScanSerializer,
    UpdateScheduledScanSerializer,
    ToggleScheduledScanSerializer,
)
from .subfinder_provider_settings_serializers import SubfinderProviderSettingsSerializer

# 兼容旧名称
ProviderSettingsSerializer = SubfinderProviderSettingsSerializer

__all__ = [
    # Mixins
    'ScanConfigValidationMixin',
    # Scan
    'ScanSerializer',
    'ScanHistorySerializer',
    'QuickScanSerializer',
    'InitiateScanSerializer',
    # ScanLog
    'ScanLogSerializer',
    # Scheduled Scan
    'ScheduledScanSerializer',
    'CreateScheduledScanSerializer',
    'UpdateScheduledScanSerializer',
    'ToggleScheduledScanSerializer',
    # Subfinder Provider Settings
    'SubfinderProviderSettingsSerializer',
    'ProviderSettingsSerializer',  # 兼容旧名称
]

57  backend/apps/scan/serializers/mixins.py  Normal file
@@ -0,0 +1,57 @@
"""序列化器通用 Mixin 和工具类"""

from rest_framework import serializers
import yaml


class DuplicateKeyLoader(yaml.SafeLoader):
    """自定义 YAML Loader,检测重复 key"""
    pass


def _check_duplicate_keys(loader, node, deep=False):
    """检测 YAML mapping 中的重复 key"""
    mapping = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        if key in mapping:
            raise yaml.constructor.ConstructorError(
                "while constructing a mapping", node.start_mark,
                f"发现重复的配置项 '{key}',后面的配置会覆盖前面的配置,请删除重复项", key_node.start_mark
            )
        mapping[key] = loader.construct_object(value_node, deep=deep)
    return mapping


DuplicateKeyLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
    _check_duplicate_keys
)


class ScanConfigValidationMixin:
    """扫描配置验证 Mixin"""

    def validate_configuration(self, value):
        """验证 YAML 配置格式"""
        if not value or not value.strip():
            raise serializers.ValidationError("configuration 不能为空")

        try:
            yaml.load(value, Loader=DuplicateKeyLoader)
        except yaml.YAMLError as e:
            raise serializers.ValidationError(f"无效的 YAML 格式: {str(e)}")

        return value

    def validate_engine_ids(self, value):
        """验证引擎 ID 列表"""
        if not value:
            raise serializers.ValidationError("engine_ids 不能为空,请至少选择一个扫描引擎")
        return value

    def validate_engine_names(self, value):
        """验证引擎名称列表"""
        if not value:
            raise serializers.ValidationError("engine_names 不能为空")
        return value

13  backend/apps/scan/serializers/scan_log_serializers.py  Normal file
@@ -0,0 +1,13 @@
"""扫描日志序列化器"""

from rest_framework import serializers

from ..models import ScanLog


class ScanLogSerializer(serializers.ModelSerializer):
    """扫描日志序列化器"""

    class Meta:
        model = ScanLog
        fields = ['id', 'level', 'content', 'created_at']

111  backend/apps/scan/serializers/scan_serializers.py  Normal file
@@ -0,0 +1,111 @@
"""扫描任务序列化器"""

from rest_framework import serializers

from ..models import Scan
from .mixins import ScanConfigValidationMixin


class ScanSerializer(serializers.ModelSerializer):
    """扫描任务序列化器"""
    target_name = serializers.SerializerMethodField()

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine_ids', 'engine_names',
            'created_at', 'stopped_at', 'status', 'results_dir',
            'container_ids', 'error_message'
        ]
        read_only_fields = [
            'id', 'created_at', 'stopped_at', 'results_dir',
            'container_ids', 'error_message', 'status'
        ]

    def get_target_name(self, obj):
        return obj.target.name if obj.target else None


class ScanHistorySerializer(serializers.ModelSerializer):
    """扫描历史列表序列化器"""

    target_name = serializers.CharField(source='target.name', read_only=True)
    worker_name = serializers.CharField(source='worker.name', read_only=True, allow_null=True)
    summary = serializers.SerializerMethodField()
    progress = serializers.IntegerField(read_only=True)
    current_stage = serializers.CharField(read_only=True)
    stage_progress = serializers.JSONField(read_only=True)

    class Meta:
        model = Scan
        fields = [
            'id', 'target', 'target_name', 'engine_ids', 'engine_names',
            'worker_name', 'created_at', 'status', 'error_message', 'summary',
            'progress', 'current_stage', 'stage_progress'
        ]

    def get_summary(self, obj):
        summary = {
            'subdomains': obj.cached_subdomains_count or 0,
            'websites': obj.cached_websites_count or 0,
            'endpoints': obj.cached_endpoints_count or 0,
            'ips': obj.cached_ips_count or 0,
            'directories': obj.cached_directories_count or 0,
        }
        summary['vulnerabilities'] = {
            'total': obj.cached_vulns_total or 0,
            'critical': obj.cached_vulns_critical or 0,
            'high': obj.cached_vulns_high or 0,
            'medium': obj.cached_vulns_medium or 0,
            'low': obj.cached_vulns_low or 0,
        }
        return summary


class QuickScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """快速扫描序列化器"""

    MAX_BATCH_SIZE = 1000

    targets = serializers.ListField(
        child=serializers.DictField(),
        help_text='目标列表,每个目标包含 name 字段'
    )
    configuration = serializers.CharField(required=True, help_text='YAML 格式的扫描配置')
    engine_ids = serializers.ListField(child=serializers.IntegerField(), required=True)
    engine_names = serializers.ListField(child=serializers.CharField(), required=True)

    def validate_targets(self, value):
        if not value:
            raise serializers.ValidationError("目标列表不能为空")
        if len(value) > self.MAX_BATCH_SIZE:
            raise serializers.ValidationError(
                f"快速扫描最多支持 {self.MAX_BATCH_SIZE} 个目标,当前提交了 {len(value)} 个"
            )
        for idx, target in enumerate(value):
            if 'name' not in target:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标缺少 name 字段")
            if not target['name']:
                raise serializers.ValidationError(f"第 {idx + 1} 个目标的 name 不能为空")
        return value


class InitiateScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """发起扫描任务序列化器"""

    configuration = serializers.CharField(required=True, help_text='YAML 格式的扫描配置')
    engine_ids = serializers.ListField(child=serializers.IntegerField(), required=True)
    engine_names = serializers.ListField(child=serializers.CharField(), required=True)
    organization_id = serializers.IntegerField(required=False, allow_null=True)
    target_id = serializers.IntegerField(required=False, allow_null=True)

    def validate(self, data):
        organization_id = data.get('organization_id')
        target_id = data.get('target_id')

        if not organization_id and not target_id:
            raise serializers.ValidationError('必须提供 organization_id 或 target_id 其中之一')
        if organization_id and target_id:
            raise serializers.ValidationError('organization_id 和 target_id 只能提供其中之一')

        return data

84  backend/apps/scan/serializers/scheduled_scan_serializers.py  Normal file
@@ -0,0 +1,84 @@
"""定时扫描序列化器"""

from rest_framework import serializers

from ..models import ScheduledScan
from .mixins import ScanConfigValidationMixin


class ScheduledScanSerializer(serializers.ModelSerializer):
    """定时扫描任务序列化器(用于列表和详情)"""

    organization_id = serializers.IntegerField(source='organization.id', read_only=True, allow_null=True)
    organization_name = serializers.CharField(source='organization.name', read_only=True, allow_null=True)
    target_id = serializers.IntegerField(source='target.id', read_only=True, allow_null=True)
    target_name = serializers.CharField(source='target.name', read_only=True, allow_null=True)
    scan_mode = serializers.SerializerMethodField()

    class Meta:
        model = ScheduledScan
        fields = [
            'id', 'name',
            'engine_ids', 'engine_names',
            'organization_id', 'organization_name',
            'target_id', 'target_name',
            'scan_mode',
            'cron_expression',
            'is_enabled',
            'run_count', 'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]
        read_only_fields = [
            'id', 'run_count',
            'last_run_time', 'next_run_time',
            'created_at', 'updated_at'
        ]

    def get_scan_mode(self, obj):
        return 'organization' if obj.organization_id else 'target'


class CreateScheduledScanSerializer(ScanConfigValidationMixin, serializers.Serializer):
    """创建定时扫描任务序列化器"""

    name = serializers.CharField(max_length=200, help_text='任务名称')
    configuration = serializers.CharField(required=True, help_text='YAML 格式的扫描配置')
    engine_ids = serializers.ListField(child=serializers.IntegerField(), required=True)
    engine_names = serializers.ListField(child=serializers.CharField(), required=True)
    organization_id = serializers.IntegerField(required=False, allow_null=True)
    target_id = serializers.IntegerField(required=False, allow_null=True)
    cron_expression = serializers.CharField(max_length=100, default='0 2 * * *')
    is_enabled = serializers.BooleanField(default=True)

    def validate(self, data):
        organization_id = data.get('organization_id')
        target_id = data.get('target_id')

        if not organization_id and not target_id:
            raise serializers.ValidationError('必须提供 organization_id 或 target_id 其中之一')
        if organization_id and target_id:
            raise serializers.ValidationError('organization_id 和 target_id 只能提供其中之一')

        return data


class UpdateScheduledScanSerializer(serializers.Serializer):
    """更新定时扫描任务序列化器"""

    name = serializers.CharField(max_length=200, required=False)
    engine_ids = serializers.ListField(child=serializers.IntegerField(), required=False)
    organization_id = serializers.IntegerField(required=False, allow_null=True)
    target_id = serializers.IntegerField(required=False, allow_null=True)
    cron_expression = serializers.CharField(max_length=100, required=False)
    is_enabled = serializers.BooleanField(required=False)

    def validate_engine_ids(self, value):
        if value is not None and not value:
            raise serializers.ValidationError("engine_ids 不能为空")
        return value


class ToggleScheduledScanSerializer(serializers.Serializer):
    """切换定时扫描启用状态序列化器"""

    is_enabled = serializers.BooleanField(help_text='是否启用')
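A quick sketch of how the serializers above reject bad input; the payload values (task name, YAML keys, target id) are illustrative only.

```python
from apps.scan.serializers import CreateScheduledScanSerializer

payload = {
    'name': 'nightly scan',
    # duplicate YAML key -> DuplicateKeyLoader raises, validate_configuration rejects it
    'configuration': 'subdomain_discovery: {}\nsubdomain_discovery: {}',
    'engine_ids': [1],
    'engine_names': ['engine-a'],
    'target_id': 3,
}
serializer = CreateScheduledScanSerializer(data=payload)
assert not serializer.is_valid()
print(serializer.errors['configuration'])

# Supplying both organization_id and target_id (or neither) is rejected by validate()
```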
@@ -0,0 +1,55 @@
"""Subfinder Provider 配置序列化器"""

from rest_framework import serializers


class SubfinderProviderSettingsSerializer(serializers.Serializer):
    """Subfinder Provider 配置序列化器

    支持的 Provider:
    - fofa: email + api_key (composite)
    - censys: api_id + api_secret (composite)
    - hunter, shodan, zoomeye, securitytrails, threatbook, quake: api_key (single)

    注意:djangorestframework-camel-case 会自动处理 camelCase <-> snake_case 转换
    所以这里统一使用 snake_case
    """

    VALID_PROVIDERS = {
        'fofa', 'hunter', 'shodan', 'censys',
        'zoomeye', 'securitytrails', 'threatbook', 'quake'
    }

    def to_internal_value(self, data):
        """验证并转换输入数据"""
        if not isinstance(data, dict):
            raise serializers.ValidationError('Expected a dictionary')

        result = {}
        for provider, config in data.items():
            if provider not in self.VALID_PROVIDERS:
                continue

            if not isinstance(config, dict):
                continue

            db_config = {'enabled': bool(config.get('enabled', False))}

            if provider == 'fofa':
                db_config['email'] = str(config.get('email', ''))
                db_config['api_key'] = str(config.get('api_key', ''))
            elif provider == 'censys':
                db_config['api_id'] = str(config.get('api_id', ''))
                db_config['api_secret'] = str(config.get('api_secret', ''))
            else:
                db_config['api_key'] = str(config.get('api_key', ''))

            result[provider] = db_config

        return result

    def to_representation(self, instance):
        """输出数据(数据库格式,camel-case 中间件会自动转换)"""
        if isinstance(instance, dict):
            return instance
        return instance.providers if hasattr(instance, 'providers') else {}

@@ -17,7 +17,6 @@ from .scan_state_service import ScanStateService
from .scan_control_service import ScanControlService
from .scan_stats_service import ScanStatsService
from .scheduled_scan_service import ScheduledScanService
from .blacklist_service import BlacklistService
from .target_export_service import TargetExportService

__all__ = [
@@ -27,7 +26,6 @@ __all__ = [
    'ScanControlService',
    'ScanStatsService',
    'ScheduledScanService',
    'BlacklistService',  # 黑名单过滤服务
    'TargetExportService',  # 目标导出服务
]

@@ -1,82 +0,0 @@
"""
黑名单过滤服务

过滤敏感域名(如 .gov、.edu、.mil 等)

当前版本使用默认规则,后续将支持从前端配置加载。
"""

from typing import List, Optional
from django.db.models import QuerySet
import re
import logging

logger = logging.getLogger(__name__)


class BlacklistService:
    """
    黑名单过滤服务 - 过滤敏感域名

    TODO: 后续版本支持从前端配置加载黑名单规则
    - 用户在开始扫描时配置黑名单 URL、域名、IP
    - 黑名单规则存储在数据库中,与 Scan 或 Engine 关联
    """

    # 默认黑名单正则规则
    DEFAULT_PATTERNS = [
        r'\.gov$',  # .gov 结尾
        r'\.gov\.[a-z]{2}$',  # .gov.cn, .gov.uk 等
    ]

    def __init__(self, patterns: Optional[List[str]] = None):
        """
        初始化黑名单服务

        Args:
            patterns: 正则表达式列表,None 使用默认规则
        """
        self.patterns = patterns or self.DEFAULT_PATTERNS
        self._compiled_patterns = [re.compile(p) for p in self.patterns]

    def filter_queryset(
        self,
        queryset: QuerySet,
        url_field: str = 'url'
    ) -> QuerySet:
        """
        数据库层面过滤 queryset

        使用 PostgreSQL 正则表达式排除黑名单 URL

        Args:
            queryset: 原始 queryset
            url_field: URL 字段名

        Returns:
            QuerySet: 过滤后的 queryset
        """
        for pattern in self.patterns:
            queryset = queryset.exclude(**{f'{url_field}__regex': pattern})
        return queryset

    def filter_url(self, url: str) -> bool:
        """
        检查单个 URL 是否通过黑名单过滤

        Args:
            url: 要检查的 URL

        Returns:
            bool: True 表示通过(不在黑名单),False 表示被过滤
        """
        for pattern in self._compiled_patterns:
            if pattern.search(url):
                return False
        return True

    # TODO: 后续版本实现
    # @classmethod
    # def from_scan(cls, scan_id: int) -> 'BlacklistService':
    #     """从数据库加载扫描配置的黑名单规则"""
    #     pass
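The scan-level `BlacklistService` above is dropped by this commit in favour of rule-driven filtering; the replacement wiring (shown further down in `target_export_service.py`) looks roughly like this sketch, where the target id is illustrative.

```python
from apps.common.services import BlacklistService  # rule source (database)
from apps.common.utils import BlacklistFilter      # pure filtering logic

rules = BlacklistService().get_rules(1)            # 1 is an illustrative target_id
blacklist_filter = BlacklistFilter(rules)

if blacklist_filter.is_allowed("https://example.gov"):
    pass  # only hosts/URLs that clear the blacklist get exported
```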
138
backend/apps/scan/services/subfinder_provider_config_service.py
Normal file
138
backend/apps/scan/services/subfinder_provider_config_service.py
Normal file
@@ -0,0 +1,138 @@
|
||||
"""Subfinder Provider 配置文件生成服务
|
||||
|
||||
负责生成 subfinder 的 provider-config.yaml 配置文件
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import yaml
|
||||
|
||||
from ..models import SubfinderProviderSettings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SubfinderProviderConfigService:
|
||||
"""Subfinder Provider 配置文件生成服务"""
|
||||
|
||||
# Provider 格式定义
|
||||
PROVIDER_FORMATS = {
|
||||
'fofa': {'type': 'composite', 'format': '{email}:{api_key}'},
|
||||
'censys': {'type': 'composite', 'format': '{api_id}:{api_secret}'},
|
||||
'hunter': {'type': 'single', 'field': 'api_key'},
|
||||
'shodan': {'type': 'single', 'field': 'api_key'},
|
||||
'zoomeye': {'type': 'single', 'field': 'api_key'},
|
||||
'securitytrails': {'type': 'single', 'field': 'api_key'},
|
||||
'threatbook': {'type': 'single', 'field': 'api_key'},
|
||||
'quake': {'type': 'single', 'field': 'api_key'},
|
||||
}
|
||||
|
||||
def generate(self, output_dir: str) -> Optional[str]:
|
||||
"""
|
||||
生成 provider-config.yaml 文件
|
||||
|
||||
Args:
|
||||
output_dir: 输出目录路径
|
||||
|
||||
Returns:
|
||||
生成的配置文件路径,如果没有启用的 provider 则返回 None
|
||||
"""
|
||||
settings = SubfinderProviderSettings.get_instance()
|
||||
|
||||
config = {}
|
||||
has_enabled = False
|
||||
|
||||
for provider, format_info in self.PROVIDER_FORMATS.items():
|
||||
provider_config = settings.providers.get(provider, {})
|
||||
|
||||
if not provider_config.get('enabled'):
|
||||
config[provider] = []
|
||||
continue
|
||||
|
||||
value = self._build_provider_value(provider, provider_config)
|
||||
if value:
|
||||
config[provider] = [value] # 单个 key 放入数组
|
||||
has_enabled = True
|
||||
logger.debug(f"Provider {provider} 已启用")
|
||||
else:
|
||||
config[provider] = []
|
||||
|
||||
# 检查是否有任何启用的 provider
|
||||
if not has_enabled:
|
||||
logger.info("没有启用的 Provider,跳过配置文件生成")
|
||||
return None
|
||||
|
||||
# 确保输出目录存在
|
||||
output_path = Path(output_dir) / 'provider-config.yaml'
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# 写入 YAML 文件(使用默认列表格式,和 subfinder 一致)
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
yaml.dump(config, f, default_flow_style=False, allow_unicode=True)
|
||||
|
||||
# 设置文件权限为 600(仅所有者可读写)
|
||||
os.chmod(output_path, 0o600)
|
||||
|
||||
logger.info(f"Provider 配置文件已生成: {output_path}")
|
||||
return str(output_path)
|
||||
|
||||
def _build_provider_value(self, provider: str, config: dict) -> Optional[str]:
|
||||
"""根据 provider 格式规则构建配置值
|
||||
|
||||
Args:
|
||||
provider: provider 名称
|
||||
config: provider 配置字典
|
||||
|
||||
Returns:
|
||||
构建的配置值字符串,如果配置不完整则返回 None
|
||||
"""
|
||||
format_info = self.PROVIDER_FORMATS.get(provider)
|
||||
if not format_info:
|
||||
return None
|
||||
|
||||
if format_info['type'] == 'composite':
|
||||
# 复合格式:需要多个字段
|
||||
format_str = format_info['format']
|
||||
try:
|
||||
# 提取格式字符串中的字段名
|
||||
# 例如 '{email}:{api_key}' -> ['email', 'api_key']
|
||||
import re
|
||||
fields = re.findall(r'\{(\w+)\}', format_str)
|
||||
|
||||
# 检查所有字段是否都有值
|
||||
values = {}
|
||||
for field in fields:
|
||||
value = config.get(field, '').strip()
|
||||
if not value:
|
||||
logger.debug(f"Provider {provider} 缺少字段 {field}")
|
||||
return None
|
||||
values[field] = value
|
||||
|
||||
return format_str.format(**values)
|
||||
except (KeyError, ValueError) as e:
|
||||
logger.warning(f"构建 {provider} 配置值失败: {e}")
|
||||
return None
|
||||
else:
|
||||
# 单字段格式
|
||||
field = format_info['field']
|
||||
value = config.get(field, '').strip()
|
||||
if not value:
|
||||
logger.debug(f"Provider {provider} 缺少字段 {field}")
|
||||
return None
|
||||
return value
|
||||
|
||||
def cleanup(self, config_path: str) -> None:
|
||||
"""清理配置文件
|
||||
|
||||
Args:
|
||||
config_path: 配置文件路径
|
||||
"""
|
||||
try:
|
||||
if config_path and Path(config_path).exists():
|
||||
Path(config_path).unlink()
|
||||
logger.debug(f"已清理配置文件: {config_path}")
|
||||
except Exception as e:
|
||||
logger.warning(f"清理配置文件失败: {config_path} - {e}")
|
||||
@@ -10,37 +10,58 @@
|
||||
import ipaddress
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, Iterator
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from .blacklist_service import BlacklistService
|
||||
from apps.common.utils import BlacklistFilter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_export_service(target_id: int) -> 'TargetExportService':
|
||||
"""
|
||||
工厂函数:创建带黑名单过滤的导出服务
|
||||
|
||||
Args:
|
||||
target_id: 目标 ID,用于加载黑名单规则
|
||||
|
||||
Returns:
|
||||
TargetExportService: 配置好黑名单过滤器的导出服务实例
|
||||
"""
|
||||
from apps.common.services import BlacklistService
|
||||
|
||||
rules = BlacklistService().get_rules(target_id)
|
||||
blacklist_filter = BlacklistFilter(rules)
|
||||
return TargetExportService(blacklist_filter=blacklist_filter)
|
||||
|
||||
|
||||
class TargetExportService:
|
||||
"""
|
||||
目标导出服务 - 提供统一的目标提取和文件导出功能
|
||||
|
||||
使用方式:
|
||||
# Task 层决定数据源
|
||||
queryset = WebSite.objects.filter(target_id=target_id).values_list('url', flat=True)
|
||||
from apps.common.services import BlacklistService
|
||||
from apps.common.utils import BlacklistFilter
|
||||
|
||||
# 获取规则并创建过滤器
|
||||
blacklist_service = BlacklistService()
|
||||
rules = blacklist_service.get_rules(target_id)
|
||||
blacklist_filter = BlacklistFilter(rules)
|
||||
|
||||
# 使用导出服务
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
export_service = TargetExportService(blacklist_filter=blacklist_filter)
|
||||
result = export_service.export_urls(target_id, output_path, queryset)
|
||||
"""
|
||||
|
||||
def __init__(self, blacklist_service: Optional[BlacklistService] = None):
|
||||
def __init__(self, blacklist_filter: Optional[BlacklistFilter] = None):
|
||||
"""
|
||||
初始化导出服务
|
||||
|
||||
Args:
|
||||
blacklist_service: 黑名单过滤服务,None 表示禁用过滤
|
||||
blacklist_filter: 黑名单过滤器,None 表示禁用过滤
|
||||
"""
|
||||
self.blacklist_service = blacklist_service
|
||||
self.blacklist_filter = blacklist_filter
|
||||
|
||||
def export_urls(
|
||||
self,
|
||||
@@ -79,19 +100,15 @@ class TargetExportService:
|
||||
|
||||
logger.info("开始导出 URL - target_id=%s, output=%s", target_id, output_path)
|
||||
|
||||
# 应用黑名单过滤(数据库层面)
|
||||
if self.blacklist_service:
|
||||
# 注意:queryset 应该是原始 queryset,不是 values_list
|
||||
# 这里假设 Task 层传入的是 values_list,需要在 Task 层处理过滤
|
||||
pass
|
||||
|
||||
total_count = 0
|
||||
filtered_count = 0
|
||||
try:
|
||||
with open(output_file, 'w', encoding='utf-8', buffering=8192) as f:
|
||||
for url in queryset.iterator(chunk_size=batch_size):
|
||||
if url:
|
||||
# Python 层面黑名单过滤
|
||||
if self.blacklist_service and not self.blacklist_service.filter_url(url):
|
||||
# 黑名单过滤
|
||||
if self.blacklist_filter and not self.blacklist_filter.is_allowed(url):
|
||||
filtered_count += 1
|
||||
continue
|
||||
f.write(f"{url}\n")
|
||||
total_count += 1
|
||||
@@ -102,6 +119,9 @@ class TargetExportService:
|
||||
logger.error("文件写入失败: %s - %s", output_path, e)
|
||||
raise
|
||||
|
||||
if filtered_count > 0:
|
||||
logger.info("黑名单过滤: 过滤 %d 个 URL", filtered_count)
|
||||
|
||||
# 默认值回退模式
|
||||
if total_count == 0:
|
||||
total_count = self._generate_default_urls(target_id, output_file)
|
||||
@@ -206,18 +226,18 @@ class TargetExportService:
|
||||
|
||||
def _should_write_url(self, url: str) -> bool:
|
||||
"""检查 URL 是否应该写入(通过黑名单过滤)"""
|
||||
if self.blacklist_service:
|
||||
return self.blacklist_service.filter_url(url)
|
||||
if self.blacklist_filter:
|
||||
return self.blacklist_filter.is_allowed(url)
|
||||
return True
|
||||
|
||||
def export_targets(
|
||||
def export_hosts(
|
||||
self,
|
||||
target_id: int,
|
||||
output_path: str,
|
||||
batch_size: int = 1000
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
域名/IP 导出函数(用于端口扫描)
|
||||
主机列表导出函数(用于端口扫描)
|
||||
|
||||
根据 Target 类型选择导出逻辑:
|
||||
- DOMAIN: 从 Subdomain 表流式导出子域名
|
||||
@@ -255,7 +275,7 @@ class TargetExportService:
|
||||
target_name = target.name
|
||||
|
||||
logger.info(
|
||||
"开始导出扫描目标 - Target ID: %d, Name: %s, Type: %s, 输出文件: %s",
|
||||
"开始导出主机列表 - Target ID: %d, Name: %s, Type: %s, 输出文件: %s",
|
||||
target_id, target_name, target_type, output_path
|
||||
)
|
||||
|
||||
@@ -277,7 +297,7 @@ class TargetExportService:
|
||||
raise ValueError(f"不支持的目标类型: {target_type}")
|
||||
|
||||
logger.info(
|
||||
"✓ 扫描目标导出完成 - 类型: %s, 总数: %d, 文件: %s",
|
||||
"✓ 主机列表导出完成 - 类型: %s, 总数: %d, 文件: %s",
|
||||
type_desc, total_count, output_path
|
||||
)
|
||||
|
||||
@@ -295,7 +315,7 @@ class TargetExportService:
|
||||
output_path: Path,
|
||||
batch_size: int
|
||||
) -> int:
|
||||
"""导出域名类型目标的子域名"""
|
||||
"""导出域名类型目标的根域名 + 子域名"""
|
||||
from apps.asset.services.asset.subdomain_service import SubdomainService
|
||||
|
||||
subdomain_service = SubdomainService()
|
||||
@@ -305,23 +325,27 @@ class TargetExportService:
|
||||
)
|
||||
|
||||
total_count = 0
|
||||
written_domains = set() # 去重(子域名表可能已包含根域名)
|
||||
|
||||
with open(output_path, 'w', encoding='utf-8', buffering=8192) as f:
|
||||
# 1. 先写入根域名
|
||||
if self._should_write_target(target_name):
|
||||
f.write(f"{target_name}\n")
|
||||
written_domains.add(target_name)
|
||||
total_count += 1
|
||||
|
||||
# 2. 再写入子域名(跳过已写入的根域名)
|
||||
for domain_name in domain_iterator:
|
||||
if domain_name in written_domains:
|
||||
continue
|
||||
if self._should_write_target(domain_name):
|
||||
f.write(f"{domain_name}\n")
|
||||
written_domains.add(domain_name)
|
||||
total_count += 1
|
||||
|
||||
if total_count % 10000 == 0:
|
||||
logger.info("已导出 %d 个域名...", total_count)
|
||||
|
||||
# 默认值模式:如果没有子域名,使用根域名
|
||||
if total_count == 0:
|
||||
logger.info("采用默认域名:%s (target_id=%d)", target_name, target_id)
|
||||
if self._should_write_target(target_name):
|
||||
with open(output_path, 'w', encoding='utf-8') as f:
|
||||
f.write(f"{target_name}\n")
|
||||
total_count = 1
|
||||
|
||||
return total_count
|
||||
|
||||
def _export_ip(self, target_name: str, output_path: Path) -> int:
|
||||
@@ -359,6 +383,6 @@ class TargetExportService:
|
||||
|
||||
def _should_write_target(self, target: str) -> bool:
|
||||
"""检查目标是否应该写入(通过黑名单过滤)"""
|
||||
if self.blacklist_service:
|
||||
return self.blacklist_service.filter_url(target)
|
||||
if self.blacklist_filter:
|
||||
return self.blacklist_filter.is_allowed(target)
|
||||
return True
|
||||
|
||||
@@ -8,7 +8,8 @@ import logging
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.models import WebSite
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services import TargetExportService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -49,9 +50,8 @@ def export_sites_task(
|
||||
# 构建数据源 queryset(Task 层决定数据源)
|
||||
queryset = WebSite.objects.filter(target_id=target_id).values_list('url', flat=True)
|
||||
|
||||
# 使用 TargetExportService 处理导出
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
# 使用工厂函数创建导出服务
|
||||
export_service = create_export_service(target_id)
|
||||
|
||||
result = export_service.export_urls(
|
||||
target_id=target_id,
|
||||
|
||||
@@ -10,7 +10,7 @@ import logging
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.models import WebSite
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,9 +46,8 @@ def export_urls_for_fingerprint_task(
|
||||
# 构建数据源 queryset(Task 层决定数据源)
|
||||
queryset = WebSite.objects.filter(target_id=target_id).values_list('url', flat=True)
|
||||
|
||||
# 使用 TargetExportService 处理导出
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
# 使用工厂函数创建导出服务
|
||||
export_service = create_export_service(target_id)
|
||||
|
||||
result = export_service.export_urls(
|
||||
target_id=target_id,
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
提供端口扫描流程所需的原子化任务
|
||||
"""
|
||||
|
||||
from .export_scan_targets_task import export_scan_targets_task
|
||||
from .export_hosts_task import export_hosts_task
|
||||
from .run_and_stream_save_ports_task import run_and_stream_save_ports_task
|
||||
from .types import PortScanRecord
|
||||
|
||||
__all__ = [
|
||||
'export_scan_targets_task',
|
||||
'export_hosts_task',
|
||||
'run_and_stream_save_ports_task',
|
||||
'PortScanRecord',
|
||||
]
|
||||
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
导出扫描目标到 TXT 文件的 Task
|
||||
导出主机列表到 TXT 文件的 Task
|
||||
|
||||
使用 TargetExportService.export_targets() 统一处理导出逻辑
|
||||
使用 TargetExportService.export_hosts() 统一处理导出逻辑
|
||||
|
||||
根据 Target 类型决定导出内容:
|
||||
- DOMAIN: 从 Subdomain 表导出子域名
|
||||
@@ -11,19 +11,19 @@
|
||||
import logging
|
||||
from prefect import task
|
||||
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@task(name="export_scan_targets")
|
||||
def export_scan_targets_task(
|
||||
@task(name="export_hosts")
|
||||
def export_hosts_task(
|
||||
target_id: int,
|
||||
output_file: str,
|
||||
batch_size: int = 1000
|
||||
) -> dict:
|
||||
"""
|
||||
导出扫描目标到 TXT 文件
|
||||
导出主机列表到 TXT 文件
|
||||
|
||||
根据 Target 类型自动决定导出内容:
|
||||
- DOMAIN: 从 Subdomain 表导出子域名(流式处理,支持 10万+ 域名)
|
||||
@@ -47,11 +47,10 @@ def export_scan_targets_task(
|
||||
ValueError: Target 不存在
|
||||
IOError: 文件写入失败
|
||||
"""
|
||||
# 使用 TargetExportService 处理导出
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
# 使用工厂函数创建导出服务
|
||||
export_service = create_export_service(target_id)
|
||||
|
||||
result = export_service.export_targets(
|
||||
result = export_service.export_hosts(
|
||||
target_id=target_id,
|
||||
output_path=output_file,
|
||||
batch_size=batch_size
|
||||
@@ -14,7 +14,9 @@ from pathlib import Path
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.services import HostPortMappingService
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
from apps.common.services import BlacklistService
|
||||
from apps.common.utils import BlacklistFilter
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -80,8 +82,8 @@ def export_site_urls_task(
|
||||
output_path = Path(output_file)
|
||||
output_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# 初始化黑名单服务
|
||||
blacklist_service = BlacklistService()
|
||||
# 获取规则并创建过滤器
|
||||
blacklist_filter = BlacklistFilter(BlacklistService().get_rules(target_id))
|
||||
|
||||
# 直接查询 HostPortMapping 表,按 host 排序
|
||||
service = HostPortMappingService()
|
||||
@@ -100,11 +102,14 @@ def export_site_urls_task(
|
||||
host = assoc['host']
|
||||
port = assoc['port']
|
||||
|
||||
# 先校验 host,通过了再生成 URL
|
||||
if not blacklist_filter.is_allowed(host):
|
||||
continue
|
||||
|
||||
# 根据端口号生成URL
|
||||
for url in _generate_urls_from_port(host, port):
|
||||
if blacklist_service.filter_url(url):
|
||||
f.write(f"{url}\n")
|
||||
total_urls += 1
|
||||
f.write(f"{url}\n")
|
||||
total_urls += 1
|
||||
|
||||
if association_count % 1000 == 0:
|
||||
logger.info("已处理 %d 条关联,生成 %d 个URL...", association_count, total_urls)
|
||||
@@ -114,9 +119,9 @@ def export_site_urls_task(
|
||||
association_count, total_urls, str(output_path)
|
||||
)
|
||||
|
||||
# 默认值回退模式:使用 TargetExportService
|
||||
# 默认值回退模式:使用工厂函数创建导出服务
|
||||
if total_urls == 0:
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
export_service = create_export_service(target_id)
|
||||
total_urls = export_service._generate_default_urls(target_id, output_path)
|
||||
|
||||
return {
|
||||
|
||||
@@ -111,6 +111,7 @@ def save_domains_task(
|
||||
continue
|
||||
|
||||
# 只有通过验证的域名才添加到批次和计数
|
||||
# 注意:不在此处过滤黑名单,最大化资产发现
|
||||
batch.append(domain)
|
||||
total_domains += 1
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from prefect import task
|
||||
from typing import Optional
|
||||
|
||||
from apps.asset.models import WebSite
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -56,9 +56,8 @@ def export_sites_task(
|
||||
# 构建数据源 queryset(Task 层决定数据源)
|
||||
queryset = WebSite.objects.filter(target_id=target_id).values_list('url', flat=True)
|
||||
|
||||
# 使用 TargetExportService 处理导出
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
# 使用工厂函数创建导出服务
|
||||
export_service = create_export_service(target_id)
|
||||
|
||||
result = export_service.export_urls(
|
||||
target_id=target_id,
|
||||
|
||||
@@ -10,7 +10,7 @@ from typing import Dict, Optional
|
||||
from prefect import task
|
||||
|
||||
from apps.asset.models import Endpoint
|
||||
from apps.scan.services import TargetExportService, BlacklistService
|
||||
from apps.scan.services.target_export_service import create_export_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,9 +46,8 @@ def export_endpoints_task(
|
||||
# 构建数据源 queryset(Task 层决定数据源)
|
||||
queryset = Endpoint.objects.filter(target_id=target_id).values_list('url', flat=True)
|
||||
|
||||
# 使用 TargetExportService 处理导出
|
||||
blacklist_service = BlacklistService()
|
||||
export_service = TargetExportService(blacklist_service=blacklist_service)
|
||||
# 使用工厂函数创建导出服务
|
||||
export_service = create_export_service(target_id)
|
||||
|
||||
result = export_service.export_urls(
|
||||
target_id=target_id,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from django.urls import path, include
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from .views import ScanViewSet, ScheduledScanViewSet, ScanLogListView
|
||||
from .views import ScanViewSet, ScheduledScanViewSet, ScanLogListView, SubfinderProviderSettingsView
|
||||
from .notifications.views import notification_callback
|
||||
from apps.asset.views import (
|
||||
SubdomainSnapshotViewSet, WebsiteSnapshotViewSet, DirectorySnapshotViewSet,
|
||||
@@ -31,6 +31,8 @@ urlpatterns = [
|
||||
path('', include(router.urls)),
|
||||
# Worker 回调 API
|
||||
path('callbacks/notification/', notification_callback, name='notification-callback'),
|
||||
# API Key 配置
|
||||
path('settings/api-keys/', SubfinderProviderSettingsView.as_view(), name='subfinder-provider-settings'),
|
||||
# 扫描日志 API
|
||||
path('scans/<int:scan_id>/logs/', ScanLogListView.as_view(), name='scan-logs-list'),
|
||||
# 嵌套路由:/api/scans/{scan_pk}/xxx/
|
||||
|
||||
@@ -48,7 +48,7 @@ ENABLE_COMMAND_LOGGING = getattr(settings, 'ENABLE_COMMAND_LOGGING', True)
|
||||
# 动态并发控制阈值(可在 Django settings 中覆盖)
|
||||
SCAN_CPU_HIGH = getattr(settings, 'SCAN_CPU_HIGH', 90.0) # CPU 高水位(百分比)
|
||||
SCAN_MEM_HIGH = getattr(settings, 'SCAN_MEM_HIGH', 80.0) # 内存高水位(百分比)
|
||||
SCAN_LOAD_CHECK_INTERVAL = getattr(settings, 'SCAN_LOAD_CHECK_INTERVAL', 30) # 负载检查间隔(秒)
|
||||
SCAN_LOAD_CHECK_INTERVAL = getattr(settings, 'SCAN_LOAD_CHECK_INTERVAL', 180) # 负载检查间隔(秒)
|
||||
SCAN_COMMAND_STARTUP_DELAY = getattr(settings, 'SCAN_COMMAND_STARTUP_DELAY', 5) # 命令启动前等待(秒)
|
||||
|
||||
_ACTIVE_COMMANDS = 0
|
||||
@@ -74,7 +74,7 @@ def _wait_for_system_load() -> None:
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"系统负载较高,暂缓启动: cpu=%.1f%% (阈值 %.1f%%), mem=%.1f%% (阈值 %.1f%%)",
|
||||
"系统负载较高,任务将排队执行,防止oom: cpu=%.1f%% (阈值 %.1f%%), mem=%.1f%% (阈值 %.1f%%)",
|
||||
cpu,
|
||||
SCAN_CPU_HIGH,
|
||||
mem,
|
||||
|
||||
@@ -3,9 +3,11 @@
|
||||
from .scan_views import ScanViewSet
|
||||
from .scheduled_scan_views import ScheduledScanViewSet
|
||||
from .scan_log_views import ScanLogListView
|
||||
from .subfinder_provider_settings_views import SubfinderProviderSettingsView
|
||||
|
||||
__all__ = [
|
||||
'ScanViewSet',
|
||||
'ScheduledScanViewSet',
|
||||
'ScanLogListView',
|
||||
'SubfinderProviderSettingsView',
|
||||
]
|
||||
|
||||
@@ -3,6 +3,7 @@ from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.exceptions import NotFound, APIException
|
||||
from rest_framework.filters import SearchFilter
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.db.utils import DatabaseError, IntegrityError, OperationalError
|
||||
import logging
|
||||
@@ -33,7 +34,8 @@ class ScanViewSet(viewsets.ModelViewSet):
|
||||
"""扫描任务视图集"""
|
||||
serializer_class = ScanSerializer
|
||||
pagination_class = BasePagination
|
||||
filter_backends = [SearchFilter]
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter]
|
||||
filterset_fields = ['target'] # 支持 ?target=123 过滤
|
||||
search_fields = ['target__name'] # 按目标名称搜索
|
||||
|
||||
def get_queryset(self):
|
||||
|
||||
@@ -37,6 +37,11 @@ class ScheduledScanViewSet(viewsets.ModelViewSet):
    - PUT /scheduled-scans/{id}/ update a scheduled scan
    - DELETE /scheduled-scans/{id}/ delete a scheduled scan
    - POST /scheduled-scans/{id}/toggle/ toggle enabled state

    Query parameters:
    - target_id: filter by target ID
    - organization_id: filter by organization ID
    - search: search by name
    """

    queryset = ScheduledScan.objects.all().order_by('-created_at')

@@ -49,6 +54,19 @@ class ScheduledScanViewSet(viewsets.ModelViewSet):
        super().__init__(*args, **kwargs)
        self.service = ScheduledScanService()

    def get_queryset(self):
        """Supports filtering by target_id and organization_id"""
        queryset = super().get_queryset()
        target_id = self.request.query_params.get('target_id')
        organization_id = self.request.query_params.get('organization_id')

        if target_id:
            queryset = queryset.filter(target_id=target_id)
        if organization_id:
            queryset = queryset.filter(organization_id=organization_id)

        return queryset

    def get_serializer_class(self):
        """Return a different serializer depending on the action"""
        if self.action == 'create':
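The same pattern applies to scheduled scans, except the filtering is done by hand in get_queryset(); a hypothetical request using the query parameters documented above:

```python
import requests

resp = requests.get(
    "http://localhost:8000/api/scheduled-scans/",   # placeholder base URL
    params={"target_id": 42, "search": "nightly"},  # illustrative values
)
print(resp.json())
```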
backend/apps/scan/views/subfinder_provider_settings_views.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""Subfinder provider settings views"""

import logging
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response

from ..models import SubfinderProviderSettings
from ..serializers import SubfinderProviderSettingsSerializer

logger = logging.getLogger(__name__)


class SubfinderProviderSettingsView(APIView):
    """Subfinder provider settings view

    GET /api/settings/api-keys/ - fetch the configuration
    PUT /api/settings/api-keys/ - update the configuration
    """

    def get(self, request):
        """Fetch the Subfinder provider configuration"""
        settings = SubfinderProviderSettings.get_instance()
        serializer = SubfinderProviderSettingsSerializer(settings.providers)
        return Response(serializer.data)

    def put(self, request):
        """Update the Subfinder provider configuration"""
        serializer = SubfinderProviderSettingsSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        settings = SubfinderProviderSettings.get_instance()
        settings.providers.update(serializer.validated_data)
        settings.save()

        logger.info("Subfinder provider settings updated")

        return Response(SubfinderProviderSettingsSerializer(settings.providers).data)
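A sketch of how a client might exercise the two endpoints above. The PUT handler merges the validated payload into the stored providers dict via dict.update(); whether a partial payload passes validation depends on SubfinderProviderSettingsSerializer, which this diff does not include, and the key names below mirror the frontend form rather than a confirmed API contract.

```python
import requests

BASE = "http://localhost:8000/api"  # placeholder

# Read the current provider configuration
current = requests.get(f"{BASE}/settings/api-keys/").json()

# Update one provider; field names are assumptions about the serializer.
payload = {"shodan": {"enabled": True, "apiKey": "<your-key>"}}
resp = requests.put(f"{BASE}/settings/api-keys/", json=payload)
resp.raise_for_status()
```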
@@ -1,4 +1,4 @@
# Generated by Django 5.2.7 on 2026-01-02 04:45
# Generated by Django 5.2.7 on 2026-01-06 00:55

from django.db import migrations, models
@@ -99,31 +99,6 @@ class TargetService:

    # ==================== Create operations ====================

    def create_or_get_target(
        self,
        name: str,
        target_type: str
    ) -> Tuple[Target, bool]:
        """
        Create or fetch a target

        Args:
            name: target name
            target_type: target type

        Returns:
            (Target instance, whether it was newly created)
        """
        logger.debug("Create or fetch target - Name: %s, Type: %s", name, target_type)
        target, created = self.repo.get_or_create(name, target_type)

        if created:
            logger.info("Created new target - ID: %s, Name: %s", target.id, name)
        else:
            logger.debug("Target already exists - ID: %s, Name: %s", target.id, name)

        return target, created

    def batch_create_targets(
        self,
        targets_data: List[Dict[str, Any]],
@@ -11,6 +11,8 @@ from .services.target_service import TargetService
from .services.organization_service import OrganizationService
from apps.common.pagination import BasePagination
from apps.common.response_helpers import success_response
from apps.common.models import BlacklistRule
from apps.common.serializers import TargetBlacklistRuleSerializer

logger = logging.getLogger(__name__)

@@ -405,3 +407,48 @@ class TargetViewSet(viewsets.ModelViewSet):
    # GET /api/targets/{id}/ip-addresses/ -> HostPortMappingViewSet
    # GET /api/targets/{id}/ip-addresses/export/ -> HostPortMappingViewSet.export
    # GET /api/targets/{id}/vulnerabilities/ -> VulnerabilityViewSet

    # ==================== Blacklist management ====================

    @action(detail=True, methods=['get', 'put'], url_path='blacklist')
    def blacklist(self, request, pk=None):
        """
        Target blacklist rule management

        GET /api/targets/{id}/blacklist/ - list the target's blacklist rules
        PUT /api/targets/{id}/blacklist/ - replace all rules (textarea-save scenario)

        Design notes:
        - PUT performs a full replace, which suits a "one rule per line in a textarea" frontend
        - The user edits the textarea -> clicks save -> the backend replaces everything

        Architecture: MVS pattern
        - View: parameter validation, response formatting
        - Service: business logic (BlacklistService)
        - Model: data persistence (BlacklistRule)
        """
        from apps.common.services import BlacklistService

        target = self.get_object()
        blacklist_service = BlacklistService()

        if request.method == 'GET':
            # Fetch the target's blacklist rules
            rules = blacklist_service.get_target_rules(target.id)
            patterns = list(rules.values_list('pattern', flat=True))
            return success_response(data={'patterns': patterns})

        elif request.method == 'PUT':
            # Full replace
            patterns = request.data.get('patterns', [])

            if not isinstance(patterns, list):
                return Response(
                    {'error': {'code': 'VALIDATION_ERROR', 'message': 'patterns must be a list'}},
                    status=status.HTTP_400_BAD_REQUEST
                )

            # Delegate the full replace to the service layer
            result = blacklist_service.replace_target_rules(target, patterns)

            return success_response(data=result)
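A minimal sketch of the textarea full-replace flow described in the docstring; the target id and base URL are placeholders, and the example patterns reuse the rule formats shown on the settings page elsewhere in this diff.

```python
import requests

BASE = "http://localhost:8000/api"  # placeholder
target_id = 42                      # hypothetical target

# GET returns the target's current patterns
patterns = requests.get(f"{BASE}/targets/{target_id}/blacklist/").json()

# PUT replaces the whole rule set (one rule per textarea line in the UI)
new_patterns = ["*.gov", "*cdn*", "192.168.1.1", "10.0.0.0/8"]
resp = requests.put(
    f"{BASE}/targets/{target_id}/blacklist/",
    json={"patterns": new_patterns},
)
resp.raise_for_status()
```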
@@ -51,6 +51,7 @@ INSTALLED_APPS = [
    'django.contrib.staticfiles',
    # Third-party apps
    'rest_framework',
    'django_filters',  # DRF filter support
    'drf_yasg',
    'corsheaders',
    'channels',  # WebSocket support

@@ -11,6 +11,9 @@ setuptools==75.6.0
# CORS support
django-cors-headers==4.3.1

# Filter support
django-filter==24.3

# Environment variable management
python-dotenv==1.0.1
@@ -63,11 +63,7 @@ wait_for_server() {
run_migrations() {
    log_step "Running database migrations..."

    # Dev environment: run makemigrations first
    if [ "$DEV_MODE" = "true" ]; then
        docker compose exec -T server python backend/manage.py makemigrations --noinput 2>/dev/null || true
    fi

    # Migration files should be generated manually and committed to the repo; only migrate runs here
    docker compose exec -T server python backend/manage.py migrate --noinput
    log_info "Database migrations complete"
}
@@ -3,26 +3,21 @@ set -e

echo "[START] Starting XingRin Server..."

# 1. Generate and run database migrations
echo " [1/3] Generating database migration files..."
# 1. Run database migrations (migration files should be committed to the repo; only migrate runs here)
echo " [1/3] Running database migrations..."
cd /app/backend
python manage.py makemigrations
echo " ✓ Migration files generated"

echo " [1.1/3] Running database migrations..."
python manage.py migrate --noinput
echo " ✓ Database migrations complete"

echo " [1.2/3] Initializing default scan engine..."
echo " [1.1/3] Initializing default scan engine..."
python manage.py init_default_engine
echo " ✓ Default scan engine ready"

echo " [1.3/3] Initializing default directory wordlists..."
echo " [1.2/3] Initializing default directory wordlists..."
python manage.py init_wordlists
echo " ✓ Default directory wordlists ready"

echo " [1.4/3] Initializing default fingerprint library..."
echo " [1.3/3] Initializing default fingerprint library..."
python manage.py init_fingerprints
echo " ✓ Default fingerprint library ready"
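The entrypoint now assumes migration files are committed and only runs migrate plus the init commands. For an in-process equivalent (for example in a smoke test), the same sequence could be driven through Django's call_command; no options beyond what the script shows are assumed here.

```python
# Hedged sketch; assumes DJANGO_SETTINGS_MODULE is already set in the environment.
import django
from django.core.management import call_command

django.setup()

call_command("migrate", interactive=False)
call_command("init_default_engine")
call_command("init_wordlists")
call_command("init_fingerprints")
```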
@@ -155,7 +155,11 @@ fi
echo -e "${GREEN}[OK]${NC} Services started"

# Data initialization
./scripts/init-data.sh
if [ "$DEV_MODE" = true ]; then
    ./scripts/init-data.sh --dev
else
    ./scripts/init-data.sh
fi

# In quiet mode the result is not printed (the caller prints it)
if [ "$QUIET_MODE" = true ]; then
frontend/app/[locale]/settings/api-keys/page.tsx (new file, 306 lines)
@@ -0,0 +1,306 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState, useEffect } from 'react'
|
||||
import { IconEye, IconEyeOff, IconWorldSearch, IconRadar2 } from '@tabler/icons-react'
|
||||
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Switch } from '@/components/ui/switch'
|
||||
import { Separator } from '@/components/ui/separator'
|
||||
import { Badge } from '@/components/ui/badge'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { useApiKeySettings, useUpdateApiKeySettings } from '@/hooks/use-api-key-settings'
|
||||
import type { ApiKeySettings } from '@/types/api-key-settings.types'
|
||||
|
||||
// 密码输入框组件(带显示/隐藏切换)
|
||||
function PasswordInput({ value, onChange, placeholder, disabled }: {
|
||||
value: string
|
||||
onChange: (value: string) => void
|
||||
placeholder?: string
|
||||
disabled?: boolean
|
||||
}) {
|
||||
const [show, setShow] = useState(false)
|
||||
return (
|
||||
<div className="relative">
|
||||
<Input
|
||||
type={show ? 'text' : 'password'}
|
||||
value={value}
|
||||
onChange={(e) => onChange(e.target.value)}
|
||||
placeholder={placeholder}
|
||||
disabled={disabled}
|
||||
className="pr-10"
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setShow(!show)}
|
||||
className="absolute right-3 top-1/2 -translate-y-1/2 text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
{show ? <IconEyeOff className="h-4 w-4" /> : <IconEye className="h-4 w-4" />}
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Provider 配置定义
|
||||
const PROVIDERS = [
|
||||
{
|
||||
key: 'fofa',
|
||||
name: 'FOFA',
|
||||
description: '网络空间测绘平台,提供全球互联网资产搜索',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-blue-500',
|
||||
bgColor: 'bg-blue-500/10',
|
||||
fields: [
|
||||
{ name: 'email', label: '邮箱', type: 'text', placeholder: 'your@email.com' },
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 FOFA API Key' },
|
||||
],
|
||||
docUrl: 'https://fofa.info/api',
|
||||
},
|
||||
{
|
||||
key: 'hunter',
|
||||
name: 'Hunter (鹰图)',
|
||||
description: '奇安信威胁情报平台,提供网络空间资产测绘',
|
||||
icon: IconRadar2,
|
||||
color: 'text-orange-500',
|
||||
bgColor: 'bg-orange-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Hunter API Key' },
|
||||
],
|
||||
docUrl: 'https://hunter.qianxin.com/',
|
||||
},
|
||||
{
|
||||
key: 'shodan',
|
||||
name: 'Shodan',
|
||||
description: '全球最大的互联网设备搜索引擎',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-red-500',
|
||||
bgColor: 'bg-red-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Shodan API Key' },
|
||||
],
|
||||
docUrl: 'https://developer.shodan.io/',
|
||||
},
|
||||
{
|
||||
key: 'censys',
|
||||
name: 'Censys',
|
||||
description: '互联网资产搜索和监控平台',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-purple-500',
|
||||
bgColor: 'bg-purple-500/10',
|
||||
fields: [
|
||||
{ name: 'apiId', label: 'API ID', type: 'text', placeholder: '输入 Censys API ID' },
|
||||
{ name: 'apiSecret', label: 'API Secret', type: 'password', placeholder: '输入 Censys API Secret' },
|
||||
],
|
||||
docUrl: 'https://search.censys.io/api',
|
||||
},
|
||||
{
|
||||
key: 'zoomeye',
|
||||
name: 'ZoomEye (钟馗之眼)',
|
||||
description: '知道创宇网络空间搜索引擎',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-green-500',
|
||||
bgColor: 'bg-green-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 ZoomEye API Key' },
|
||||
],
|
||||
docUrl: 'https://www.zoomeye.org/doc',
|
||||
},
|
||||
{
|
||||
key: 'securitytrails',
|
||||
name: 'SecurityTrails',
|
||||
description: 'DNS 历史记录和子域名数据平台',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-cyan-500',
|
||||
bgColor: 'bg-cyan-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 SecurityTrails API Key' },
|
||||
],
|
||||
docUrl: 'https://securitytrails.com/corp/api',
|
||||
},
|
||||
{
|
||||
key: 'threatbook',
|
||||
name: 'ThreatBook (微步在线)',
|
||||
description: '威胁情报平台,提供域名和 IP 情报查询',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-indigo-500',
|
||||
bgColor: 'bg-indigo-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 ThreatBook API Key' },
|
||||
],
|
||||
docUrl: 'https://x.threatbook.com/api',
|
||||
},
|
||||
{
|
||||
key: 'quake',
|
||||
name: 'Quake (360)',
|
||||
description: '360 网络空间测绘系统',
|
||||
icon: IconWorldSearch,
|
||||
color: 'text-teal-500',
|
||||
bgColor: 'bg-teal-500/10',
|
||||
fields: [
|
||||
{ name: 'apiKey', label: 'API Key', type: 'password', placeholder: '输入 Quake API Key' },
|
||||
],
|
||||
docUrl: 'https://quake.360.net/quake/#/help',
|
||||
},
|
||||
]
|
||||
|
||||
// 默认配置
|
||||
const DEFAULT_SETTINGS: ApiKeySettings = {
|
||||
fofa: { enabled: false, email: '', apiKey: '' },
|
||||
hunter: { enabled: false, apiKey: '' },
|
||||
shodan: { enabled: false, apiKey: '' },
|
||||
censys: { enabled: false, apiId: '', apiSecret: '' },
|
||||
zoomeye: { enabled: false, apiKey: '' },
|
||||
securitytrails: { enabled: false, apiKey: '' },
|
||||
threatbook: { enabled: false, apiKey: '' },
|
||||
quake: { enabled: false, apiKey: '' },
|
||||
}
|
||||
|
||||
export default function ApiKeysSettingsPage() {
|
||||
const { data: settings, isLoading } = useApiKeySettings()
|
||||
const updateMutation = useUpdateApiKeySettings()
|
||||
|
||||
const [formData, setFormData] = useState<ApiKeySettings>(DEFAULT_SETTINGS)
|
||||
const [hasChanges, setHasChanges] = useState(false)
|
||||
|
||||
// 当数据加载完成后,更新表单数据
|
||||
useEffect(() => {
|
||||
if (settings) {
|
||||
setFormData({ ...DEFAULT_SETTINGS, ...settings })
|
||||
setHasChanges(false)
|
||||
}
|
||||
}, [settings])
|
||||
|
||||
const updateProvider = (providerKey: string, field: string, value: any) => {
|
||||
setFormData(prev => ({
|
||||
...prev,
|
||||
[providerKey]: {
|
||||
...prev[providerKey as keyof ApiKeySettings],
|
||||
[field]: value,
|
||||
}
|
||||
}))
|
||||
setHasChanges(true)
|
||||
}
|
||||
|
||||
const handleSave = async () => {
|
||||
updateMutation.mutate(formData)
|
||||
setHasChanges(false)
|
||||
}
|
||||
|
||||
const enabledCount = Object.values(formData).filter((p: any) => p?.enabled).length
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="p-4 md:p-6 space-y-6">
|
||||
<div>
|
||||
<Skeleton className="h-8 w-48" />
|
||||
<Skeleton className="h-4 w-96 mt-2" />
|
||||
</div>
|
||||
<div className="grid gap-4">
|
||||
{[1, 2, 3].map((i) => (
|
||||
<Skeleton key={i} className="h-24 w-full" />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="p-4 md:p-6 space-y-6">
|
||||
{/* 页面标题 */}
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<h1 className="text-2xl font-semibold">API 密钥配置</h1>
|
||||
{enabledCount > 0 && (
|
||||
<Badge variant="secondary">{enabledCount} 个已启用</Badge>
|
||||
)}
|
||||
</div>
|
||||
<p className="text-muted-foreground mt-1">
|
||||
配置第三方数据源的 API 密钥,用于增强子域名发现能力。启用后将在 subfinder 扫描时自动使用。
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Provider 卡片列表 */}
|
||||
<div className="grid gap-4">
|
||||
{PROVIDERS.map((provider) => {
|
||||
const data = formData[provider.key as keyof ApiKeySettings] || {}
|
||||
const isEnabled = (data as any)?.enabled || false
|
||||
|
||||
return (
|
||||
<Card key={provider.key}>
|
||||
<CardHeader className="pb-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className={`flex h-10 w-10 items-center justify-center rounded-lg ${provider.bgColor}`}>
|
||||
<provider.icon className={`h-5 w-5 ${provider.color}`} />
|
||||
</div>
|
||||
<div>
|
||||
<div className="flex items-center gap-2">
|
||||
<CardTitle className="text-base">{provider.name}</CardTitle>
|
||||
{isEnabled && <Badge variant="outline" className="text-xs text-green-600">已启用</Badge>}
|
||||
</div>
|
||||
<CardDescription>{provider.description}</CardDescription>
|
||||
</div>
|
||||
</div>
|
||||
<Switch
|
||||
checked={isEnabled}
|
||||
onCheckedChange={(checked) => updateProvider(provider.key, 'enabled', checked)}
|
||||
/>
|
||||
</div>
|
||||
</CardHeader>
|
||||
|
||||
{/* 展开的配置表单 */}
|
||||
{isEnabled && (
|
||||
<CardContent className="pt-0">
|
||||
<Separator className="mb-4" />
|
||||
<div className="space-y-4">
|
||||
{provider.fields.map((field) => (
|
||||
<div key={field.name} className="space-y-2">
|
||||
<label className="text-sm font-medium">{field.label}</label>
|
||||
{field.type === 'password' ? (
|
||||
<PasswordInput
|
||||
value={(data as any)[field.name] || ''}
|
||||
onChange={(value) => updateProvider(provider.key, field.name, value)}
|
||||
placeholder={field.placeholder}
|
||||
/>
|
||||
) : (
|
||||
<Input
|
||||
type="text"
|
||||
value={(data as any)[field.name] || ''}
|
||||
onChange={(e) => updateProvider(provider.key, field.name, e.target.value)}
|
||||
placeholder={field.placeholder}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
<p className="text-xs text-muted-foreground">
|
||||
获取 API Key:
|
||||
<a
|
||||
href={provider.docUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-primary hover:underline ml-1"
|
||||
>
|
||||
{provider.docUrl}
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
)}
|
||||
</Card>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
|
||||
{/* 保存按钮 */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={updateMutation.isPending || !hasChanges}
|
||||
>
|
||||
{updateMutation.isPending ? '保存中...' : '保存配置'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
frontend/app/[locale]/settings/blacklist/page.tsx (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState, useEffect } from "react"
|
||||
import { useTranslations } from "next-intl"
|
||||
import { AlertTriangle, Loader2, Ban } from "lucide-react"
|
||||
import { Button } from "@/components/ui/button"
|
||||
import { Textarea } from "@/components/ui/textarea"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
|
||||
import { useGlobalBlacklist, useUpdateGlobalBlacklist } from "@/hooks/use-global-blacklist"
|
||||
|
||||
/**
|
||||
* Global blacklist settings page
|
||||
*/
|
||||
export default function GlobalBlacklistPage() {
|
||||
const t = useTranslations("pages.settings.blacklist")
|
||||
|
||||
const [blacklistText, setBlacklistText] = useState("")
|
||||
const [hasChanges, setHasChanges] = useState(false)
|
||||
|
||||
const { data, isLoading, error } = useGlobalBlacklist()
|
||||
const updateBlacklist = useUpdateGlobalBlacklist()
|
||||
|
||||
// Initialize text when data loads
|
||||
useEffect(() => {
|
||||
if (data?.patterns) {
|
||||
setBlacklistText(data.patterns.join("\n"))
|
||||
setHasChanges(false)
|
||||
}
|
||||
}, [data])
|
||||
|
||||
// Handle text change
|
||||
const handleTextChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
|
||||
setBlacklistText(e.target.value)
|
||||
setHasChanges(true)
|
||||
}
|
||||
|
||||
// Handle save
|
||||
const handleSave = () => {
|
||||
const patterns = blacklistText
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0)
|
||||
|
||||
updateBlacklist.mutate(
|
||||
{ patterns },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setHasChanges(false)
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col gap-4 p-4">
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-8 w-48" />
|
||||
<Skeleton className="h-4 w-96" />
|
||||
</div>
|
||||
<Skeleton className="h-[400px] w-full" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col items-center justify-center py-12">
|
||||
<AlertTriangle className="h-10 w-10 text-destructive mb-4" />
|
||||
<p className="text-muted-foreground">{t("loadError")}</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-1 flex-col gap-4 p-4">
|
||||
{/* Page header */}
|
||||
<div>
|
||||
<h1 className="text-2xl font-bold">{t("title")}</h1>
|
||||
<p className="text-muted-foreground">{t("description")}</p>
|
||||
</div>
|
||||
|
||||
{/* Blacklist card */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex items-center gap-2">
|
||||
<Ban className="h-5 w-5 text-muted-foreground" />
|
||||
<CardTitle>{t("card.title")}</CardTitle>
|
||||
</div>
|
||||
<CardDescription>{t("card.description")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
{/* Rules hint */}
|
||||
<div className="flex flex-wrap items-center gap-x-4 gap-y-2 text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">{t("rules.title")}:</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*.gov</code> {t("rules.domain")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*cdn*</code> {t("rules.keyword")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">192.168.1.1</code> {t("rules.ip")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">10.0.0.0/8</code> {t("rules.cidr")}</span>
|
||||
</div>
|
||||
|
||||
{/* Scope hint */}
|
||||
<div className="rounded-lg border bg-muted/50 p-3 text-sm">
|
||||
<p className="text-muted-foreground">{t("scopeHint")}</p>
|
||||
</div>
|
||||
|
||||
{/* Input */}
|
||||
<Textarea
|
||||
value={blacklistText}
|
||||
onChange={handleTextChange}
|
||||
placeholder={t("placeholder")}
|
||||
className="min-h-[320px] font-mono text-sm"
|
||||
/>
|
||||
|
||||
{/* Save button */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={!hasChanges || updateBlacklist.isPending}
|
||||
>
|
||||
{updateBlacklist.isPending && (
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
)}
|
||||
{t("save")}
|
||||
</Button>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -5,15 +5,15 @@ import { useEffect } from "react"
|
||||
|
||||
/**
|
||||
* Target detail page (compatible with old routes)
|
||||
* Automatically redirects to websites page
|
||||
* Automatically redirects to overview page
|
||||
*/
|
||||
export default function TargetDetailsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const router = useRouter()
|
||||
|
||||
useEffect(() => {
|
||||
// Redirect to websites page
|
||||
router.replace(`/target/${id}/websites/`)
|
||||
// Redirect to overview page
|
||||
router.replace(`/target/${id}/overview/`)
|
||||
}, [id, router])
|
||||
|
||||
return null
|
||||
|
||||
@@ -12,7 +12,8 @@ import { useTranslations } from "next-intl"
|
||||
|
||||
/**
|
||||
* Target detail layout
|
||||
* Provides shared target information and navigation for all sub-pages
|
||||
* Two-level navigation: Overview / Assets / Vulnerabilities
|
||||
* Assets has secondary navigation for different asset types
|
||||
*/
|
||||
export default function TargetLayout({
|
||||
children,
|
||||
@@ -30,26 +31,52 @@ export default function TargetLayout({
|
||||
error
|
||||
} = useTarget(Number(id))
|
||||
|
||||
// Get currently active tab
|
||||
const getActiveTab = () => {
|
||||
if (pathname.includes("/subdomain")) return "subdomain"
|
||||
if (pathname.includes("/endpoints")) return "endpoints"
|
||||
if (pathname.includes("/websites")) return "websites"
|
||||
if (pathname.includes("/directories")) return "directories"
|
||||
// Get primary navigation active tab
|
||||
const getPrimaryTab = () => {
|
||||
if (pathname.includes("/overview")) return "overview"
|
||||
if (pathname.includes("/vulnerabilities")) return "vulnerabilities"
|
||||
if (pathname.includes("/ip-addresses")) return "ip-addresses"
|
||||
return ""
|
||||
if (pathname.includes("/settings")) return "settings"
|
||||
// All asset pages fall under "assets"
|
||||
if (
|
||||
pathname.includes("/websites") ||
|
||||
pathname.includes("/subdomain") ||
|
||||
pathname.includes("/ip-addresses") ||
|
||||
pathname.includes("/endpoints") ||
|
||||
pathname.includes("/directories")
|
||||
) {
|
||||
return "assets"
|
||||
}
|
||||
return "overview"
|
||||
}
|
||||
|
||||
// Get secondary navigation active tab (for assets)
|
||||
const getSecondaryTab = () => {
|
||||
if (pathname.includes("/websites")) return "websites"
|
||||
if (pathname.includes("/subdomain")) return "subdomain"
|
||||
if (pathname.includes("/ip-addresses")) return "ip-addresses"
|
||||
if (pathname.includes("/endpoints")) return "endpoints"
|
||||
if (pathname.includes("/directories")) return "directories"
|
||||
return "websites"
|
||||
}
|
||||
|
||||
// Check if we should show secondary navigation
|
||||
const showSecondaryNav = getPrimaryTab() === "assets"
|
||||
|
||||
// Tab path mapping
|
||||
const basePath = `/target/${id}`
|
||||
const tabPaths = {
|
||||
subdomain: `${basePath}/subdomain/`,
|
||||
endpoints: `${basePath}/endpoints/`,
|
||||
websites: `${basePath}/websites/`,
|
||||
directories: `${basePath}/directories/`,
|
||||
const primaryPaths = {
|
||||
overview: `${basePath}/overview/`,
|
||||
assets: `${basePath}/websites/`, // Default to websites when clicking assets
|
||||
vulnerabilities: `${basePath}/vulnerabilities/`,
|
||||
settings: `${basePath}/settings/`,
|
||||
}
|
||||
|
||||
const secondaryPaths = {
|
||||
websites: `${basePath}/websites/`,
|
||||
subdomain: `${basePath}/subdomain/`,
|
||||
"ip-addresses": `${basePath}/ip-addresses/`,
|
||||
endpoints: `${basePath}/endpoints/`,
|
||||
directories: `${basePath}/directories/`,
|
||||
}
|
||||
|
||||
// Get counts for each tab from target data
|
||||
@@ -62,27 +89,24 @@ export default function TargetLayout({
|
||||
"ip-addresses": (target as any)?.summary?.ips || 0,
|
||||
}
|
||||
|
||||
// Calculate total assets count
|
||||
const totalAssets = counts.websites + counts.subdomain + counts["ip-addresses"] + counts.endpoints + counts.directories
|
||||
|
||||
// Loading state
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* Page header skeleton */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div className="w-full max-w-xl space-y-2">
|
||||
<div className="flex items-center gap-2">
|
||||
<Skeleton className="h-6 w-6 rounded-md" />
|
||||
<Skeleton className="h-7 w-48" />
|
||||
</div>
|
||||
<Skeleton className="h-4 w-72" />
|
||||
</div>
|
||||
{/* Header skeleton */}
|
||||
<div className="flex items-center gap-2 px-4 lg:px-6">
|
||||
<Skeleton className="h-4 w-16" />
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<Skeleton className="h-4 w-32" />
|
||||
</div>
|
||||
|
||||
{/* Tabs navigation skeleton */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div className="flex gap-2">
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-24" />
|
||||
</div>
|
||||
{/* Tabs skeleton */}
|
||||
<div className="flex gap-1 px-4 lg:px-6">
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-20" />
|
||||
<Skeleton className="h-9 w-24" />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
@@ -123,74 +147,38 @@ export default function TargetLayout({
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-4 py-4 md:gap-6 md:py-6">
|
||||
{/* Page header */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight flex items-center gap-2">
|
||||
<Target />
|
||||
{target.name}
|
||||
</h2>
|
||||
<p className="text-muted-foreground">{target.description || t("noDescription")}</p>
|
||||
</div>
|
||||
{/* Header: Page label + Target name */}
|
||||
<div className="flex items-center gap-2 text-sm px-4 lg:px-6">
|
||||
<span className="text-muted-foreground">{t("breadcrumb.targetDetail")}</span>
|
||||
<span className="text-muted-foreground">/</span>
|
||||
<span className="font-medium flex items-center gap-1.5">
|
||||
<Target className="h-4 w-4" />
|
||||
{target.name}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Tabs navigation - Use Link to ensure progress bar is triggered */}
|
||||
<div className="flex items-center justify-between px-4 lg:px-6">
|
||||
<Tabs value={getActiveTab()} className="w-full">
|
||||
{/* Primary navigation */}
|
||||
<div className="px-4 lg:px-6">
|
||||
<Tabs value={getPrimaryTab()}>
|
||||
<TabsList>
|
||||
<TabsTrigger value="websites" asChild>
|
||||
<Link href={tabPaths.websites} className="flex items-center gap-0.5">
|
||||
Websites
|
||||
{counts.websites > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.websites}
|
||||
</Badge>
|
||||
)}
|
||||
<TabsTrigger value="overview" asChild>
|
||||
<Link href={primaryPaths.overview} className="flex items-center gap-0.5">
|
||||
{t("tabs.overview")}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="subdomain" asChild>
|
||||
<Link href={tabPaths.subdomain} className="flex items-center gap-0.5">
|
||||
Subdomains
|
||||
{counts.subdomain > 0 && (
|
||||
<TabsTrigger value="assets" asChild>
|
||||
<Link href={primaryPaths.assets} className="flex items-center gap-0.5">
|
||||
{t("tabs.assets")}
|
||||
{totalAssets > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.subdomain}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="ip-addresses" asChild>
|
||||
<Link href={tabPaths["ip-addresses"]} className="flex items-center gap-0.5">
|
||||
IP Addresses
|
||||
{counts["ip-addresses"] > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts["ip-addresses"]}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="endpoints" asChild>
|
||||
<Link href={tabPaths.endpoints} className="flex items-center gap-0.5">
|
||||
URLs
|
||||
{counts.endpoints > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.endpoints}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="directories" asChild>
|
||||
<Link href={tabPaths.directories} className="flex items-center gap-0.5">
|
||||
Directories
|
||||
{counts.directories > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.directories}
|
||||
{totalAssets}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="vulnerabilities" asChild>
|
||||
<Link href={tabPaths.vulnerabilities} className="flex items-center gap-0.5">
|
||||
Vulnerabilities
|
||||
<Link href={primaryPaths.vulnerabilities} className="flex items-center gap-0.5">
|
||||
{t("tabs.vulnerabilities")}
|
||||
{counts.vulnerabilities > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.vulnerabilities}
|
||||
@@ -198,10 +186,75 @@ export default function TargetLayout({
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="settings" asChild>
|
||||
<Link href={primaryPaths.settings} className="flex items-center gap-0.5">
|
||||
{t("tabs.settings")}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
</div>
|
||||
|
||||
{/* Secondary navigation (only for assets) */}
|
||||
{showSecondaryNav && (
|
||||
<div className="flex items-center px-4 lg:px-6">
|
||||
<Tabs value={getSecondaryTab()} className="w-full">
|
||||
<TabsList variant="underline">
|
||||
<TabsTrigger value="websites" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.websites} className="flex items-center gap-0.5">
|
||||
Websites
|
||||
{counts.websites > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.websites}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="subdomain" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.subdomain} className="flex items-center gap-0.5">
|
||||
Subdomains
|
||||
{counts.subdomain > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.subdomain}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="ip-addresses" variant="underline" asChild>
|
||||
<Link href={secondaryPaths["ip-addresses"]} className="flex items-center gap-0.5">
|
||||
IPs
|
||||
{counts["ip-addresses"] > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts["ip-addresses"]}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="endpoints" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.endpoints} className="flex items-center gap-0.5">
|
||||
URLs
|
||||
{counts.endpoints > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.endpoints}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value="directories" variant="underline" asChild>
|
||||
<Link href={secondaryPaths.directories} className="flex items-center gap-0.5">
|
||||
Directories
|
||||
{counts.directories > 0 && (
|
||||
<Badge variant="secondary" className="ml-1.5 h-5 min-w-5 rounded-full px-1.5 text-xs">
|
||||
{counts.directories}
|
||||
</Badge>
|
||||
)}
|
||||
</Link>
|
||||
</TabsTrigger>
|
||||
</TabsList>
|
||||
</Tabs>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Sub-page content */}
|
||||
{children}
|
||||
</div>
|
||||
|
||||
frontend/app/[locale]/target/[id]/overview/page.tsx (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { TargetOverview } from "@/components/target/target-overview"
|
||||
|
||||
/**
|
||||
* Target overview page
|
||||
* Displays target statistics and summary information
|
||||
*/
|
||||
export default function TargetOverviewPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const targetId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<TargetOverview targetId={targetId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -5,15 +5,15 @@ import { useEffect } from "react"
|
||||
|
||||
/**
|
||||
* Target detail default page
|
||||
* Automatically redirects to websites page
|
||||
* Automatically redirects to overview page
|
||||
*/
|
||||
export default function TargetDetailPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const router = useRouter()
|
||||
|
||||
useEffect(() => {
|
||||
// Redirect to websites page
|
||||
router.replace(`/target/${id}/websites/`)
|
||||
// Redirect to overview page
|
||||
router.replace(`/target/${id}/overview/`)
|
||||
}, [id, router])
|
||||
|
||||
return null
|
||||
|
||||
frontend/app/[locale]/target/[id]/settings/page.tsx (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
"use client"
|
||||
|
||||
import { useParams } from "next/navigation"
|
||||
import { TargetSettings } from "@/components/target/target-settings"
|
||||
|
||||
/**
|
||||
* Target settings page
|
||||
* Contains blacklist configuration and other settings
|
||||
*/
|
||||
export default function TargetSettingsPage() {
|
||||
const { id } = useParams<{ id: string }>()
|
||||
const targetId = Number(id)
|
||||
|
||||
return (
|
||||
<div className="px-4 lg:px-6">
|
||||
<TargetSettings targetId={targetId} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -17,6 +17,8 @@ import {
|
||||
IconBug, // Vulnerability icon
|
||||
IconMessageReport, // Feedback icon
|
||||
IconSearch, // Search icon
|
||||
IconKey, // API Key icon
|
||||
IconBan, // Blacklist icon
|
||||
} from "@tabler/icons-react"
|
||||
// Import internationalization hook
|
||||
import { useTranslations } from 'next-intl'
|
||||
@@ -168,6 +170,16 @@ export function AppSidebar({ ...props }: React.ComponentProps<typeof Sidebar>) {
|
||||
url: "/settings/notifications/",
|
||||
icon: IconSettings,
|
||||
},
|
||||
{
|
||||
name: t('apiKeys'),
|
||||
url: "/settings/api-keys/",
|
||||
icon: IconKey,
|
||||
},
|
||||
{
|
||||
name: t('globalBlacklist'),
|
||||
url: "/settings/blacklist/",
|
||||
icon: IconBan,
|
||||
},
|
||||
]
|
||||
|
||||
return (
|
||||
|
||||
@@ -161,6 +161,7 @@ interface CreateColumnsProps {
|
||||
handleStop: (scan: ScanRecord) => void
|
||||
handleViewProgress?: (scan: ScanRecord) => void
|
||||
t: ScanHistoryTranslations
|
||||
hideTargetColumn?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -173,7 +174,9 @@ export const createScanHistoryColumns = ({
|
||||
handleStop,
|
||||
handleViewProgress,
|
||||
t,
|
||||
}: CreateColumnsProps): ColumnDef<ScanRecord>[] => [
|
||||
hideTargetColumn = false,
|
||||
}: CreateColumnsProps): ColumnDef<ScanRecord>[] => {
|
||||
const columns: ColumnDef<ScanRecord>[] = [
|
||||
{
|
||||
id: "select",
|
||||
size: 40,
|
||||
@@ -574,3 +577,11 @@ export const createScanHistoryColumns = ({
|
||||
enableHiding: false,
|
||||
},
|
||||
]
|
||||
|
||||
// Filter out targetName column if hideTargetColumn is true
|
||||
if (hideTargetColumn) {
|
||||
return columns.filter(col => (col as any).accessorKey !== 'targetName')
|
||||
}
|
||||
|
||||
return columns
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ interface ScanHistoryDataTableProps {
|
||||
onPaginationChange?: (pagination: { pageIndex: number; pageSize: number }) => void
|
||||
hideToolbar?: boolean
|
||||
hidePagination?: boolean
|
||||
pageSizeOptions?: number[]
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -50,6 +51,7 @@ export function ScanHistoryDataTable({
|
||||
onPaginationChange,
|
||||
hideToolbar = false,
|
||||
hidePagination = false,
|
||||
pageSizeOptions,
|
||||
}: ScanHistoryDataTableProps) {
|
||||
const t = useTranslations("common.status")
|
||||
const tScan = useTranslations("scan.history")
|
||||
@@ -84,6 +86,7 @@ export function ScanHistoryDataTable({
|
||||
paginationInfo={paginationInfo}
|
||||
onPaginationChange={onPaginationChange}
|
||||
hidePagination={hidePagination}
|
||||
pageSizeOptions={pageSizeOptions}
|
||||
// Selection
|
||||
onSelectionChange={onSelectionChange}
|
||||
// Bulk operations
|
||||
|
||||
@@ -31,9 +31,14 @@ import { ScanProgressDialog, buildScanProgressData, type ScanProgressData } from
|
||||
*/
|
||||
interface ScanHistoryListProps {
|
||||
hideToolbar?: boolean
|
||||
targetId?: number // Filter by target ID
|
||||
pageSize?: number // Custom page size
|
||||
hideTargetColumn?: boolean // Hide target column (useful when showing scans for a specific target)
|
||||
pageSizeOptions?: number[] // Custom page size options
|
||||
hidePagination?: boolean // Hide pagination completely
|
||||
}
|
||||
|
||||
export function ScanHistoryList({ hideToolbar = false }: ScanHistoryListProps) {
|
||||
export function ScanHistoryList({ hideToolbar = false, targetId, pageSize: customPageSize, hideTargetColumn = false, pageSizeOptions, hidePagination = false }: ScanHistoryListProps) {
|
||||
const queryClient = useQueryClient()
|
||||
const [selectedScans, setSelectedScans] = useState<ScanRecord[]>([])
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false)
|
||||
@@ -97,7 +102,7 @@ export function ScanHistoryList({ hideToolbar = false }: ScanHistoryListProps) {
|
||||
// Pagination state
|
||||
const [pagination, setPagination] = useState({
|
||||
pageIndex: 0,
|
||||
pageSize: 10,
|
||||
pageSize: customPageSize || 10,
|
||||
})
|
||||
|
||||
// Search state
|
||||
@@ -115,6 +120,7 @@ export function ScanHistoryList({ hideToolbar = false }: ScanHistoryListProps) {
|
||||
page: pagination.pageIndex + 1, // API page numbers start from 1
|
||||
pageSize: pagination.pageSize,
|
||||
search: searchQuery || undefined,
|
||||
target: targetId,
|
||||
})
|
||||
|
||||
// Reset search state when request completes
|
||||
@@ -278,8 +284,9 @@ export function ScanHistoryList({ hideToolbar = false }: ScanHistoryListProps) {
|
||||
handleStop: handleStopScan,
|
||||
handleViewProgress,
|
||||
t: translations,
|
||||
hideTargetColumn,
|
||||
}),
|
||||
[navigate, translations]
|
||||
[navigate, translations, hideTargetColumn]
|
||||
)
|
||||
|
||||
// Error handling
|
||||
@@ -330,6 +337,8 @@ export function ScanHistoryList({ hideToolbar = false }: ScanHistoryListProps) {
|
||||
}}
|
||||
onPaginationChange={handlePaginationChange}
|
||||
hideToolbar={hideToolbar}
|
||||
pageSizeOptions={pageSizeOptions}
|
||||
hidePagination={hidePagination}
|
||||
/>
|
||||
|
||||
{/* Delete confirmation dialog */}
|
||||
|
||||
@@ -104,10 +104,12 @@ export function CreateScheduledScanDialog({
|
||||
{ id: 5, title: t("steps.scheduleSettings"), icon: IconClock },
|
||||
]
|
||||
|
||||
// Preset mode: skip target selection but keep basic info for name editing
|
||||
const PRESET_STEPS = [
|
||||
{ id: 1, title: t("steps.selectEngine"), icon: IconSettings },
|
||||
{ id: 2, title: t("steps.editConfig"), icon: IconCode },
|
||||
{ id: 3, title: t("steps.scheduleSettings"), icon: IconClock },
|
||||
{ id: 1, title: t("steps.basicInfo"), icon: IconInfoCircle },
|
||||
{ id: 2, title: t("steps.selectEngine"), icon: IconSettings },
|
||||
{ id: 3, title: t("steps.editConfig"), icon: IconCode },
|
||||
{ id: 4, title: t("steps.scheduleSettings"), icon: IconClock },
|
||||
]
|
||||
|
||||
const [orgSearchInput, setOrgSearchInput] = React.useState("")
|
||||
@@ -240,15 +242,18 @@ export function CreateScheduledScanDialog({
|
||||
const validateCurrentStep = (): boolean => {
|
||||
if (hasPreset) {
|
||||
switch (currentStep) {
|
||||
case 1: // Select engine
|
||||
case 1: // Basic info (preset mode)
|
||||
if (!name.trim()) { toast.error(t("form.taskNameRequired")); return false }
|
||||
return true
|
||||
case 2: // Select engine
|
||||
if (!selectedPresetId) { toast.error(t("form.scanEngineRequired")); return false }
|
||||
if (engineIds.length === 0) { toast.error(t("form.scanEngineRequired")); return false }
|
||||
return true
|
||||
case 2: // Edit config
|
||||
case 3: // Edit config
|
||||
if (!configuration.trim()) { toast.error(t("form.configurationRequired")); return false }
|
||||
if (!isYamlValid) { toast.error(t("form.yamlInvalid")); return false }
|
||||
return true
|
||||
case 3: // Schedule
|
||||
case 4: // Schedule
|
||||
const parts = cronExpression.trim().split(/\s+/)
|
||||
if (parts.length !== 5) { toast.error(t("form.cronRequired")); return false }
|
||||
return true
|
||||
@@ -352,7 +357,7 @@ export function CreateScheduledScanDialog({
|
||||
</DialogHeader>
|
||||
|
||||
<div className="border-t h-[480px] overflow-hidden">
|
||||
{/* Step 1: Basic Info + Scan Mode */}
|
||||
{/* Step 1: Basic Info + Scan Mode (full mode only) */}
|
||||
{currentStep === 1 && !hasPreset && (
|
||||
<div className="p-6 space-y-6 overflow-y-auto h-full">
|
||||
<div className="space-y-2">
|
||||
@@ -394,6 +399,29 @@ export function CreateScheduledScanDialog({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Step 1: Basic Info (preset mode - name only, target is locked) */}
|
||||
{currentStep === 1 && hasPreset && (
|
||||
<div className="p-6 space-y-6 overflow-y-auto h-full">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="name">{t("form.taskName")} *</Label>
|
||||
<Input id="name" placeholder={t("form.taskNamePlaceholder")} value={name} onChange={(e) => setName(e.target.value)} />
|
||||
<p className="text-xs text-muted-foreground">{t("form.taskNameDesc")}</p>
|
||||
</div>
|
||||
<Separator />
|
||||
<div className="space-y-3">
|
||||
<Label>{t("form.scanTarget")}</Label>
|
||||
<div className="flex items-center gap-2 p-4 border rounded-lg bg-muted/50">
|
||||
<IconTarget className="h-5 w-5 text-muted-foreground" />
|
||||
<span className="font-medium">{presetTargetName || presetOrganizationName}</span>
|
||||
<Badge variant="secondary" className="ml-auto">
|
||||
{presetTargetId ? t("form.targetScan") : t("form.organizationScan")}
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">{t("form.presetTargetHint")}</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Step 2: Select Target (Organization or Target) */}
|
||||
{currentStep === 2 && !hasPreset && (
|
||||
<div className="p-6 space-y-4 overflow-y-auto h-full">
|
||||
@@ -475,8 +503,8 @@ export function CreateScheduledScanDialog({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Step 3 (full) / Step 1 (preset): Select Engine */}
|
||||
{((currentStep === 3 && !hasPreset) || (currentStep === 1 && hasPreset)) && engines.length > 0 && (
|
||||
{/* Step 3 (full) / Step 2 (preset): Select Engine */}
|
||||
{((currentStep === 3 && !hasPreset) || (currentStep === 2 && hasPreset)) && engines.length > 0 && (
|
||||
<EnginePresetSelector
|
||||
engines={engines}
|
||||
selectedEngineIds={engineIds}
|
||||
@@ -488,8 +516,8 @@ export function CreateScheduledScanDialog({
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Step 4 (full) / Step 2 (preset): Edit Configuration */}
|
||||
{((currentStep === 4 && !hasPreset) || (currentStep === 2 && hasPreset)) && (
|
||||
{/* Step 4 (full) / Step 3 (preset): Edit Configuration */}
|
||||
{((currentStep === 4 && !hasPreset) || (currentStep === 3 && hasPreset)) && (
|
||||
<ScanConfigEditor
|
||||
configuration={configuration}
|
||||
onChange={handleManualConfigChange}
|
||||
@@ -500,8 +528,8 @@ export function CreateScheduledScanDialog({
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Step 5 (full) / Step 3 (preset): Schedule Settings */}
|
||||
{((currentStep === 5 && !hasPreset) || (currentStep === 3 && hasPreset)) && (
|
||||
{/* Step 5 (full) / Step 4 (preset): Schedule Settings */}
|
||||
{((currentStep === 5 && !hasPreset) || (currentStep === 4 && hasPreset)) && (
|
||||
<div className="p-6 space-y-6 overflow-y-auto h-full">
|
||||
<div className="space-y-2">
|
||||
<Label>{t("form.cronExpression")} *</Label>
|
||||
|
||||
@@ -8,11 +8,21 @@ interface AnsiLogViewerProps {
|
||||
className?: string
|
||||
}
|
||||
|
||||
// 日志级别颜色配置
|
||||
const LOG_LEVEL_COLORS: Record<string, string> = {
|
||||
DEBUG: "#4ec9b0", // cyan
|
||||
INFO: "#6a9955", // green
|
||||
WARNING: "#dcdcaa", // yellow
|
||||
WARN: "#dcdcaa", // yellow
|
||||
ERROR: "#f44747", // red
|
||||
CRITICAL: "#f44747", // red (bold handled separately)
|
||||
}
|
||||
|
||||
// 创建 ANSI 转换器实例
|
||||
const converter = new AnsiToHtml({
|
||||
const ansiConverter = new AnsiToHtml({
|
||||
fg: "#d4d4d4",
|
||||
bg: "#1e1e1e",
|
||||
newline: true,
|
||||
newline: false, // 我们自己处理换行
|
||||
escapeXML: true,
|
||||
colors: {
|
||||
0: "#1e1e1e", // black
|
||||
@@ -34,14 +44,57 @@ const converter = new AnsiToHtml({
|
||||
},
|
||||
})
|
||||
|
||||
// 检测内容是否包含 ANSI 颜色码
|
||||
function hasAnsiCodes(text: string): boolean {
|
||||
// ANSI 转义序列通常以 ESC[ 开头(\x1b[ 或 \u001b[)
|
||||
return /\x1b\[|\u001b\[/.test(text)
|
||||
}
|
||||
|
||||
// 解析纯文本日志内容,为日志级别添加颜色
|
||||
function colorizeLogContent(content: string): string {
|
||||
// 匹配日志格式: [时间] [级别] [模块:行号] 消息
|
||||
// 例如: [2025-01-05 10:30:00] [INFO] [apps.scan:123] 消息内容
|
||||
const logLineRegex = /^(\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\]) (\[(DEBUG|INFO|WARNING|WARN|ERROR|CRITICAL)\]) (.*)$/
|
||||
|
||||
return content
|
||||
.split("\n")
|
||||
.map((line) => {
|
||||
const match = line.match(logLineRegex)
|
||||
|
||||
if (match) {
|
||||
const [, timestamp, levelBracket, level, rest] = match
|
||||
const color = LOG_LEVEL_COLORS[level] || "#d4d4d4"
|
||||
// ansiConverter.toHtml 已经处理了 HTML 转义
|
||||
const escapedTimestamp = ansiConverter.toHtml(timestamp)
|
||||
const escapedLevelBracket = ansiConverter.toHtml(levelBracket)
|
||||
const escapedRest = ansiConverter.toHtml(rest)
|
||||
|
||||
// 时间戳灰色,日志级别带颜色,其余默认色
|
||||
return `<span style="color:#808080">${escapedTimestamp}</span> <span style="color:${color};font-weight:${level === "CRITICAL" ? "bold" : "normal"}">${escapedLevelBracket}</span> ${escapedRest}`
|
||||
}
|
||||
|
||||
// 非标准格式的行,也进行 HTML 转义
|
||||
return ansiConverter.toHtml(line)
|
||||
})
|
||||
.join("\n")
|
||||
}
|
||||
|
||||
export function AnsiLogViewer({ content, className }: AnsiLogViewerProps) {
|
||||
const containerRef = useRef<HTMLPreElement>(null)
|
||||
const isAtBottomRef = useRef(true) // 跟踪用户是否在底部
|
||||
|
||||
// 将 ANSI 转换为 HTML
|
||||
// 解析日志并添加颜色
|
||||
// 支持两种模式:ANSI 颜色码和纯文本日志级别解析
|
||||
const htmlContent = useMemo(() => {
|
||||
if (!content) return ""
|
||||
return converter.toHtml(content)
|
||||
|
||||
// 如果包含 ANSI 颜色码,直接转换
|
||||
if (hasAnsiCodes(content)) {
|
||||
return ansiConverter.toHtml(content)
|
||||
}
|
||||
|
||||
// 否则解析日志级别添加颜色
|
||||
return colorizeLogContent(content)
|
||||
}, [content])
|
||||
|
||||
// 监听滚动事件,检测用户是否在底部
|
||||
|
||||
frontend/components/target/target-overview.tsx (new file, 359 lines)
@@ -0,0 +1,359 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState } from "react"
|
||||
import Link from "next/link"
|
||||
import { useTranslations, useLocale } from "next-intl"
|
||||
import {
|
||||
Globe,
|
||||
Network,
|
||||
Server,
|
||||
Link2,
|
||||
FolderOpen,
|
||||
ShieldAlert,
|
||||
AlertTriangle,
|
||||
Clock,
|
||||
Calendar,
|
||||
ChevronRight,
|
||||
CheckCircle2,
|
||||
PauseCircle,
|
||||
Play,
|
||||
} from "lucide-react"
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { Badge } from "@/components/ui/badge"
|
||||
import { Button } from "@/components/ui/button"
|
||||
import { useTarget } from "@/hooks/use-targets"
|
||||
import { useScheduledScans } from "@/hooks/use-scheduled-scans"
|
||||
import { ScanHistoryList } from "@/components/scan/history/scan-history-list"
|
||||
import { InitiateScanDialog } from "@/components/scan/initiate-scan-dialog"
|
||||
import { getDateLocale } from "@/lib/date-utils"
|
||||
|
||||
interface TargetOverviewProps {
|
||||
targetId: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Target overview component
|
||||
* Displays statistics cards for the target
|
||||
*/
|
||||
export function TargetOverview({ targetId }: TargetOverviewProps) {
|
||||
const t = useTranslations("pages.targetDetail.overview")
|
||||
const locale = useLocale()
|
||||
|
||||
const [scanDialogOpen, setScanDialogOpen] = useState(false)
|
||||
|
||||
const { data: target, isLoading, error } = useTarget(targetId)
|
||||
const { data: scheduledScansData, isLoading: isLoadingScans } = useScheduledScans({
|
||||
targetId,
|
||||
pageSize: 5
|
||||
})
|
||||
|
||||
const scheduledScans = scheduledScansData?.results || []
|
||||
const totalScheduledScans = scheduledScansData?.total || 0
|
||||
const enabledScans = scheduledScans.filter(s => s.isEnabled)
|
||||
|
||||
// Format date helper
|
||||
const formatDate = (dateString: string | undefined): string => {
|
||||
if (!dateString) return "-"
|
||||
return new Date(dateString).toLocaleString(getDateLocale(locale), {
|
||||
year: "numeric",
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
})
|
||||
}
|
||||
|
||||
// Format short date for scheduled scans
|
||||
const formatShortDate = (dateString: string | undefined): string => {
|
||||
if (!dateString) return "-"
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const tomorrow = new Date(now)
|
||||
tomorrow.setDate(tomorrow.getDate() + 1)
|
||||
|
||||
// Check if it's today
|
||||
if (date.toDateString() === now.toDateString()) {
|
||||
return t("scheduledScans.today") + " " + date.toLocaleTimeString(getDateLocale(locale), {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
})
|
||||
}
|
||||
// Check if it's tomorrow
|
||||
if (date.toDateString() === tomorrow.toDateString()) {
|
||||
return t("scheduledScans.tomorrow") + " " + date.toLocaleTimeString(getDateLocale(locale), {
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
})
|
||||
}
|
||||
// Otherwise show date
|
||||
return date.toLocaleString(getDateLocale(locale), {
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
})
|
||||
}
|
||||
|
||||
// Get next execution time from enabled scans
|
||||
const getNextExecution = () => {
|
||||
const enabledWithNextRun = enabledScans.filter(s => s.nextRunTime)
|
||||
if (enabledWithNextRun.length === 0) return null
|
||||
|
||||
const sorted = enabledWithNextRun.sort((a, b) =>
|
||||
new Date(a.nextRunTime!).getTime() - new Date(b.nextRunTime!).getTime()
|
||||
)
|
||||
return sorted[0]
|
||||
}
|
||||
|
||||
const nextExecution = getNextExecution()
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Stats cards skeleton */}
|
||||
<div className="grid gap-4 md:grid-cols-2 lg:grid-cols-3">
|
||||
{[...Array(6)].map((_, i) => (
|
||||
<Card key={i}>
|
||||
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
|
||||
<Skeleton className="h-4 w-24" />
|
||||
<Skeleton className="h-4 w-4" />
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<Skeleton className="h-8 w-16" />
|
||||
</CardContent>
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error || !target) {
|
||||
return (
|
||||
<div className="flex flex-col items-center justify-center py-12">
|
||||
<AlertTriangle className="h-10 w-10 text-destructive mb-4" />
|
||||
<p className="text-muted-foreground">{t("loadError")}</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const summary = (target as any).summary || {}
|
||||
const vulnSummary = summary.vulnerabilities || { total: 0, critical: 0, high: 0, medium: 0, low: 0 }
|
||||
|
||||
const assetCards = [
|
||||
{
|
||||
title: t("cards.websites"),
|
||||
value: summary.websites || 0,
|
||||
icon: Globe,
|
||||
href: `/target/${targetId}/websites/`,
|
||||
},
|
||||
{
|
||||
title: t("cards.subdomains"),
|
||||
value: summary.subdomains || 0,
|
||||
icon: Network,
|
||||
href: `/target/${targetId}/subdomain/`,
|
||||
},
|
||||
{
|
||||
title: t("cards.ips"),
|
||||
value: summary.ips || 0,
|
||||
icon: Server,
|
||||
href: `/target/${targetId}/ip-addresses/`,
|
||||
},
|
||||
{
|
||||
title: t("cards.urls"),
|
||||
value: summary.endpoints || 0,
|
||||
icon: Link2,
|
||||
href: `/target/${targetId}/endpoints/`,
|
||||
},
|
||||
{
|
||||
title: t("cards.directories"),
|
||||
value: summary.directories || 0,
|
||||
icon: FolderOpen,
|
||||
href: `/target/${targetId}/directories/`,
|
||||
},
|
||||
]
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Target info + Initiate Scan button */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-4 text-sm text-muted-foreground">
|
||||
<div className="flex items-center gap-1.5">
|
||||
<Calendar className="h-4 w-4" />
|
||||
<span>{t("createdAt")}: {formatDate(target.createdAt)}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-1.5">
|
||||
<Clock className="h-4 w-4" />
|
||||
<span>{t("lastScanned")}: {formatDate(target.lastScannedAt)}</span>
|
||||
</div>
|
||||
</div>
|
||||
<Button onClick={() => setScanDialogOpen(true)}>
|
||||
<Play className="h-4 w-4 mr-2" />
|
||||
{t("initiateScan")}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Asset statistics cards */}
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold mb-4">{t("assetsTitle")}</h3>
|
||||
<div className="grid gap-4 md:grid-cols-2 lg:grid-cols-5">
|
||||
{assetCards.map((card) => (
|
||||
<Link key={card.title} href={card.href}>
|
||||
<Card className="hover:border-primary/50 transition-colors cursor-pointer">
|
||||
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
|
||||
<CardTitle className="text-sm font-medium">{card.title}</CardTitle>
|
||||
<card.icon className="h-4 w-4 text-muted-foreground" />
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="text-2xl font-bold">{card.value.toLocaleString()}</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Scheduled Scans + Vulnerability Statistics (Two columns) */}
|
||||
<div className="grid gap-4 md:grid-cols-2">
|
||||
{/* Scheduled Scans Card */}
|
||||
<Card className="flex flex-col">
|
||||
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<Clock className="h-4 w-4 text-muted-foreground" />
|
||||
<CardTitle className="text-sm font-medium">{t("scheduledScans.title")}</CardTitle>
|
||||
</div>
|
||||
<Link href={`/target/${targetId}/settings/`}>
|
||||
<Button variant="ghost" size="sm" className="h-7 text-xs">
|
||||
{t("scheduledScans.manage")}
|
||||
<ChevronRight className="h-3 w-3 ml-1" />
|
||||
</Button>
|
||||
</Link>
|
||||
</CardHeader>
|
||||
<CardContent className="flex-1 flex flex-col">
|
||||
{isLoadingScans ? (
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-4 w-32" />
|
||||
<Skeleton className="h-4 w-48" />
|
||||
</div>
|
||||
) : totalScheduledScans === 0 ? (
|
||||
<div className="flex-1 flex flex-col items-center justify-center">
|
||||
<Clock className="h-8 w-8 text-muted-foreground/50 mb-2" />
|
||||
<p className="text-sm text-muted-foreground">{t("scheduledScans.empty")}</p>
|
||||
<Link href={`/target/${targetId}/settings/`}>
|
||||
<Button variant="link" size="sm" className="mt-1">
|
||||
{t("scheduledScans.createFirst")}
|
||||
</Button>
|
||||
</Link>
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-3">
|
||||
{/* Stats row */}
|
||||
<div className="flex items-center gap-4 text-sm">
|
||||
<div>
|
||||
<span className="text-muted-foreground">{t("scheduledScans.configured")}: </span>
|
||||
<span className="font-medium">{totalScheduledScans}</span>
|
||||
</div>
|
||||
<div>
|
||||
<span className="text-muted-foreground">{t("scheduledScans.enabled")}: </span>
|
||||
<span className="font-medium text-green-600">{enabledScans.length}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Next execution */}
|
||||
{nextExecution && (
|
||||
<div className="text-sm">
|
||||
<span className="text-muted-foreground">{t("scheduledScans.nextRun")}: </span>
|
||||
<span className="font-medium">{formatShortDate(nextExecution.nextRunTime)}</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Task list - max 2 items */}
|
||||
<div className="space-y-2 pt-2 border-t">
|
||||
{scheduledScans.slice(0, 2).map((scan) => (
|
||||
<div key={scan.id} className="flex items-center gap-2 text-sm">
|
||||
{scan.isEnabled ? (
|
||||
<CheckCircle2 className="h-3.5 w-3.5 text-green-500 shrink-0" />
|
||||
) : (
|
||||
<PauseCircle className="h-3.5 w-3.5 text-muted-foreground shrink-0" />
|
||||
)}
|
||||
<span className={`truncate ${!scan.isEnabled ? 'text-muted-foreground' : ''}`}>
|
||||
{scan.name}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
{totalScheduledScans > 2 && (
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{t("scheduledScans.more", { count: totalScheduledScans - 2 })}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Vulnerability Statistics Card */}
|
||||
<Link href={`/target/${targetId}/vulnerabilities/`} className="block">
|
||||
<Card className="h-full hover:border-primary/50 transition-colors cursor-pointer flex flex-col">
|
||||
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<ShieldAlert className="h-4 w-4 text-red-500" />
|
||||
<CardTitle className="text-sm font-medium">{t("vulnerabilitiesTitle")}</CardTitle>
|
||||
</div>
|
||||
<Button variant="ghost" size="sm" className="h-7 text-xs">
|
||||
{t("viewAll")}
|
||||
<ChevronRight className="h-3 w-3 ml-1" />
|
||||
</Button>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
{/* Total count */}
|
||||
<div className="flex items-baseline gap-2">
|
||||
<span className="text-3xl font-bold">{vulnSummary.total}</span>
|
||||
<span className="text-sm text-muted-foreground">{t("cards.vulnerabilities")}</span>
|
||||
</div>
|
||||
|
||||
{/* Severity breakdown */}
|
||||
<div className="grid grid-cols-2 gap-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="w-3 h-3 rounded-full bg-red-500" />
|
||||
<span className="text-sm text-muted-foreground">{t("severity.critical")}</span>
|
||||
<span className="text-sm font-medium ml-auto">{vulnSummary.critical}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="w-3 h-3 rounded-full bg-orange-500" />
|
||||
<span className="text-sm text-muted-foreground">{t("severity.high")}</span>
|
||||
<span className="text-sm font-medium ml-auto">{vulnSummary.high}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="w-3 h-3 rounded-full bg-yellow-500" />
|
||||
<span className="text-sm text-muted-foreground">{t("severity.medium")}</span>
|
||||
<span className="text-sm font-medium ml-auto">{vulnSummary.medium}</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="w-3 h-3 rounded-full bg-blue-500" />
|
||||
<span className="text-sm text-muted-foreground">{t("severity.low")}</span>
|
||||
<span className="text-sm font-medium ml-auto">{vulnSummary.low}</span>
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</Link>
|
||||
</div>
|
||||
|
||||
{/* Scan history */}
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold mb-4">{t("scanHistoryTitle")}</h3>
|
||||
<ScanHistoryList targetId={targetId} hideToolbar pageSize={5} hideTargetColumn pageSizeOptions={[5, 10, 20, 50, 100]} />
|
||||
</div>
|
||||
|
||||
{/* Initiate Scan Dialog */}
|
||||
<InitiateScanDialog
|
||||
open={scanDialogOpen}
|
||||
onOpenChange={setScanDialogOpen}
|
||||
targetId={targetId}
|
||||
targetName={target.name}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
369
frontend/components/target/target-settings.tsx
Normal file
@@ -0,0 +1,369 @@
|
||||
"use client"
|
||||
|
||||
import React, { useState, useEffect } from "react"
|
||||
import { useTranslations, useLocale } from "next-intl"
|
||||
import { AlertTriangle, Loader2, Ban, Clock } from "lucide-react"
|
||||
import { Button } from "@/components/ui/button"
|
||||
import { Textarea } from "@/components/ui/textarea"
|
||||
import { Skeleton } from "@/components/ui/skeleton"
|
||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
AlertDialogCancel,
|
||||
AlertDialogContent,
|
||||
AlertDialogDescription,
|
||||
AlertDialogFooter,
|
||||
AlertDialogHeader,
|
||||
AlertDialogTitle,
|
||||
} from "@/components/ui/alert-dialog"
|
||||
import { useTargetBlacklist, useUpdateTargetBlacklist, useTarget } from "@/hooks/use-targets"
|
||||
import { useScheduledScans, useToggleScheduledScan, useDeleteScheduledScan } from "@/hooks/use-scheduled-scans"
|
||||
import { ScheduledScanDataTable } from "@/components/scan/scheduled/scheduled-scan-data-table"
|
||||
import { createScheduledScanColumns } from "@/components/scan/scheduled/scheduled-scan-columns"
|
||||
import { CreateScheduledScanDialog } from "@/components/scan/scheduled/create-scheduled-scan-dialog"
|
||||
import { EditScheduledScanDialog } from "@/components/scan/scheduled/edit-scheduled-scan-dialog"
|
||||
import { DataTableSkeleton } from "@/components/ui/data-table-skeleton"
|
||||
import type { ScheduledScan } from "@/types/scheduled-scan.types"
|
||||
|
||||
interface TargetSettingsProps {
|
||||
targetId: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Target settings component
|
||||
* Contains blacklist configuration and scheduled scans
|
||||
*/
|
||||
export function TargetSettings({ targetId }: TargetSettingsProps) {
|
||||
const t = useTranslations("pages.targetDetail.settings")
|
||||
const tColumns = useTranslations("columns")
|
||||
const tCommon = useTranslations("common")
|
||||
const tScan = useTranslations("scan")
|
||||
const tConfirm = useTranslations("common.confirm")
|
||||
const locale = useLocale()
|
||||
|
||||
const [blacklistText, setBlacklistText] = useState("")
|
||||
const [hasChanges, setHasChanges] = useState(false)
|
||||
|
||||
// Scheduled scan states
|
||||
const [createDialogOpen, setCreateDialogOpen] = useState(false)
|
||||
const [editDialogOpen, setEditDialogOpen] = useState(false)
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false)
|
||||
const [editingScheduledScan, setEditingScheduledScan] = useState<ScheduledScan | null>(null)
|
||||
const [deletingScheduledScan, setDeletingScheduledScan] = useState<ScheduledScan | null>(null)
|
||||
|
||||
// Pagination state
|
||||
const [page, setPage] = useState(1)
|
||||
const [pageSize, setPageSize] = useState(10)
|
||||
const [searchQuery, setSearchQuery] = useState("")
|
||||
const [isSearching, setIsSearching] = useState(false)
|
||||
|
||||
// Fetch target data for preset name
|
||||
const { data: target } = useTarget(targetId)
|
||||
|
||||
// Fetch blacklist data
|
||||
const { data, isLoading, error } = useTargetBlacklist(targetId)
|
||||
const updateBlacklist = useUpdateTargetBlacklist()
|
||||
|
||||
// Fetch scheduled scans for this target
|
||||
const {
|
||||
data: scheduledScansData,
|
||||
isLoading: isLoadingScans,
|
||||
isFetching,
|
||||
refetch
|
||||
} = useScheduledScans({
|
||||
targetId,
|
||||
page,
|
||||
pageSize,
|
||||
search: searchQuery || undefined
|
||||
})
|
||||
const { mutate: toggleScheduledScan } = useToggleScheduledScan()
|
||||
const { mutate: deleteScheduledScan } = useDeleteScheduledScan()
|
||||
|
||||
const scheduledScans = scheduledScansData?.results || []
|
||||
const total = scheduledScansData?.total || 0
|
||||
const totalPages = scheduledScansData?.totalPages || 1
|
||||
|
||||
// Build translation object for columns
|
||||
const translations = React.useMemo(() => ({
|
||||
columns: {
|
||||
taskName: tColumns("scheduledScan.taskName"),
|
||||
scanEngine: tColumns("scheduledScan.scanEngine"),
|
||||
cronExpression: tColumns("scheduledScan.cronExpression"),
|
||||
scope: tColumns("scheduledScan.scope"),
|
||||
status: tColumns("common.status"),
|
||||
nextRun: tColumns("scheduledScan.nextRun"),
|
||||
runCount: tColumns("scheduledScan.runCount"),
|
||||
lastRun: tColumns("scheduledScan.lastRun"),
|
||||
},
|
||||
actions: {
|
||||
editTask: tScan("editTask"),
|
||||
delete: tCommon("actions.delete"),
|
||||
openMenu: tCommon("actions.openMenu"),
|
||||
},
|
||||
status: {
|
||||
enabled: tCommon("status.enabled"),
|
||||
disabled: tCommon("status.disabled"),
|
||||
},
|
||||
cron: {
|
||||
everyMinute: tScan("cron.everyMinute"),
|
||||
everyNMinutes: tScan.raw("cron.everyNMinutes") as string,
|
||||
everyHour: tScan.raw("cron.everyHour") as string,
|
||||
everyNHours: tScan.raw("cron.everyNHours") as string,
|
||||
everyDay: tScan.raw("cron.everyDay") as string,
|
||||
everyWeek: tScan.raw("cron.everyWeek") as string,
|
||||
everyMonth: tScan.raw("cron.everyMonth") as string,
|
||||
weekdays: tScan.raw("cron.weekdays") as string[],
|
||||
},
|
||||
}), [tColumns, tCommon, tScan])
|
||||
|
||||
// Initialize text when data loads
|
||||
useEffect(() => {
|
||||
if (data?.patterns) {
|
||||
setBlacklistText(data.patterns.join("\n"))
|
||||
setHasChanges(false)
|
||||
}
|
||||
}, [data])
|
||||
|
||||
// Reset search state when request completes
|
||||
useEffect(() => {
|
||||
if (!isFetching && isSearching) {
|
||||
setIsSearching(false)
|
||||
}
|
||||
}, [isFetching, isSearching])
|
||||
|
||||
// Handle text change
|
||||
const handleTextChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
|
||||
setBlacklistText(e.target.value)
|
||||
setHasChanges(true)
|
||||
}
|
||||
|
||||
// Handle save
|
||||
const handleSave = () => {
|
||||
const patterns = blacklistText
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0)
|
||||
|
||||
updateBlacklist.mutate(
|
||||
{ targetId, patterns },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setHasChanges(false)
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// Format date
|
||||
const formatDate = React.useCallback((dateString: string) => {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleString(locale === "zh" ? "zh-CN" : "en-US", {
|
||||
year: "numeric",
|
||||
month: "2-digit",
|
||||
day: "2-digit",
|
||||
hour: "2-digit",
|
||||
minute: "2-digit",
|
||||
})
|
||||
}, [locale])
|
||||
|
||||
// Edit task
|
||||
const handleEdit = React.useCallback((scan: ScheduledScan) => {
|
||||
setEditingScheduledScan(scan)
|
||||
setEditDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// Delete task (open confirmation dialog)
|
||||
const handleDelete = React.useCallback((scan: ScheduledScan) => {
|
||||
setDeletingScheduledScan(scan)
|
||||
setDeleteDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// Confirm delete task
|
||||
const confirmDelete = React.useCallback(() => {
|
||||
if (deletingScheduledScan) {
|
||||
deleteScheduledScan(deletingScheduledScan.id)
|
||||
setDeleteDialogOpen(false)
|
||||
setDeletingScheduledScan(null)
|
||||
}
|
||||
}, [deletingScheduledScan, deleteScheduledScan])
|
||||
|
||||
// Toggle task enabled status
|
||||
const handleToggleStatus = React.useCallback((scan: ScheduledScan, enabled: boolean) => {
|
||||
toggleScheduledScan({ id: scan.id, isEnabled: enabled })
|
||||
}, [toggleScheduledScan])
|
||||
|
||||
// Search handler
|
||||
const handleSearchChange = (value: string) => {
|
||||
setIsSearching(true)
|
||||
setSearchQuery(value)
|
||||
setPage(1)
|
||||
}
|
||||
|
||||
// Page change handler
|
||||
const handlePageChange = React.useCallback((newPage: number) => {
|
||||
setPage(newPage)
|
||||
}, [])
|
||||
|
||||
// Page size change handler
|
||||
const handlePageSizeChange = React.useCallback((newPageSize: number) => {
|
||||
setPageSize(newPageSize)
|
||||
setPage(1)
|
||||
}, [])
|
||||
|
||||
// Add new task
|
||||
const handleAddNew = React.useCallback(() => {
|
||||
setCreateDialogOpen(true)
|
||||
}, [])
|
||||
|
||||
// Create column definition (hide scope column since we're filtering by target)
|
||||
const columns = React.useMemo(() => {
|
||||
const allColumns = createScheduledScanColumns({
|
||||
formatDate,
|
||||
handleEdit,
|
||||
handleDelete,
|
||||
handleToggleStatus,
|
||||
t: translations,
|
||||
})
|
||||
// Filter out the scope column since all scans are for this target
|
||||
return allColumns.filter(col => (col as { accessorKey?: string }).accessorKey !== 'scanMode')
|
||||
}, [formatDate, handleEdit, handleDelete, handleToggleStatus, translations])
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
<div className="space-y-2">
|
||||
<Skeleton className="h-6 w-32" />
|
||||
<Skeleton className="h-4 w-96" />
|
||||
</div>
|
||||
<Skeleton className="h-48 w-full" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="flex flex-col items-center justify-center py-12">
|
||||
<AlertTriangle className="h-10 w-10 text-destructive mb-4" />
|
||||
<p className="text-muted-foreground">{t("loadError")}</p>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Blacklist section */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex items-center gap-2">
|
||||
<Ban className="h-5 w-5 text-muted-foreground" />
|
||||
<CardTitle>{t("blacklist.title")}</CardTitle>
|
||||
</div>
|
||||
<CardDescription>{t("blacklist.description")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
{/* Rules hint */}
|
||||
<div className="flex flex-wrap items-center gap-x-4 gap-y-2 text-sm text-muted-foreground">
|
||||
<span className="font-medium text-foreground">{t("blacklist.rulesTitle")}:</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*.gov</code> {t("blacklist.rules.domainShort")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">*cdn*</code> {t("blacklist.rules.keywordShort")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">192.168.1.1</code> {t("blacklist.rules.ipShort")}</span>
|
||||
<span><code className="bg-muted px-1.5 py-0.5 rounded text-xs">10.0.0.0/8</code> {t("blacklist.rules.cidrShort")}</span>
|
||||
</div>
|
||||
|
||||
{/* Input */}
|
||||
<Textarea
|
||||
value={blacklistText}
|
||||
onChange={handleTextChange}
|
||||
placeholder={t("blacklist.placeholder")}
|
||||
className="min-h-[240px] font-mono text-sm"
|
||||
/>
|
||||
|
||||
{/* Save button */}
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
onClick={handleSave}
|
||||
disabled={!hasChanges || updateBlacklist.isPending}
|
||||
>
|
||||
{updateBlacklist.isPending && (
|
||||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
)}
|
||||
{t("blacklist.save")}
|
||||
</Button>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Scheduled Scans section */}
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<div className="flex items-center gap-2">
|
||||
<Clock className="h-5 w-5 text-muted-foreground" />
|
||||
<CardTitle>{t("scheduledScans.title")}</CardTitle>
|
||||
</div>
|
||||
<CardDescription>{t("scheduledScans.description")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{isLoadingScans ? (
|
||||
<DataTableSkeleton rows={3} columns={6} toolbarButtonCount={1} />
|
||||
) : (
|
||||
<ScheduledScanDataTable
|
||||
data={scheduledScans}
|
||||
columns={columns}
|
||||
onAddNew={handleAddNew}
|
||||
searchPlaceholder={tScan("scheduled.searchPlaceholder")}
|
||||
searchValue={searchQuery}
|
||||
onSearch={handleSearchChange}
|
||||
isSearching={isSearching}
|
||||
addButtonText={tScan("scheduled.createTitle")}
|
||||
page={page}
|
||||
pageSize={pageSize}
|
||||
total={total}
|
||||
totalPages={totalPages}
|
||||
onPageChange={handlePageChange}
|
||||
onPageSizeChange={handlePageSizeChange}
|
||||
/>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Create Dialog */}
|
||||
<CreateScheduledScanDialog
|
||||
open={createDialogOpen}
|
||||
onOpenChange={setCreateDialogOpen}
|
||||
presetTargetId={targetId}
|
||||
presetTargetName={target?.name}
|
||||
onSuccess={() => refetch()}
|
||||
/>
|
||||
|
||||
{/* Edit Dialog */}
|
||||
<EditScheduledScanDialog
|
||||
open={editDialogOpen}
|
||||
onOpenChange={setEditDialogOpen}
|
||||
scheduledScan={editingScheduledScan}
|
||||
onSuccess={() => refetch()}
|
||||
/>
|
||||
|
||||
{/* Delete Confirmation Dialog */}
|
||||
<AlertDialog open={deleteDialogOpen} onOpenChange={setDeleteDialogOpen}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>{tConfirm("deleteTitle")}</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
{tConfirm("deleteScheduledScanMessage", { name: deletingScheduledScan?.name ?? "" })}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel>{tCommon("actions.cancel")}</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
onClick={confirmDelete}
|
||||
className="bg-destructive text-destructive-foreground hover:bg-destructive/90"
|
||||
>
|
||||
{tCommon("actions.delete")}
|
||||
</AlertDialogAction>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -18,15 +18,22 @@ function Tabs({
|
||||
)
|
||||
}
|
||||
|
||||
interface TabsListProps extends React.ComponentProps<typeof TabsPrimitive.List> {
|
||||
variant?: "default" | "underline"
|
||||
}
|
||||
|
||||
function TabsList({
|
||||
className,
|
||||
variant = "default",
|
||||
...props
|
||||
}: React.ComponentProps<typeof TabsPrimitive.List>) {
|
||||
}: TabsListProps) {
|
||||
return (
|
||||
<TabsPrimitive.List
|
||||
data-slot="tabs-list"
|
||||
className={cn(
|
||||
"bg-muted text-muted-foreground inline-flex h-9 w-fit items-center justify-center rounded-lg p-[3px]",
|
||||
"inline-flex w-fit items-center justify-center",
|
||||
variant === "default" && "bg-muted text-muted-foreground h-9 rounded-lg p-[3px]",
|
||||
variant === "underline" && "h-10 gap-4 border-b border-border bg-transparent p-0",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
@@ -34,15 +41,22 @@ function TabsList({
|
||||
)
|
||||
}
|
||||
|
||||
interface TabsTriggerProps extends React.ComponentProps<typeof TabsPrimitive.Trigger> {
|
||||
variant?: "default" | "underline"
|
||||
}
|
||||
|
||||
function TabsTrigger({
|
||||
className,
|
||||
variant = "default",
|
||||
...props
|
||||
}: React.ComponentProps<typeof TabsPrimitive.Trigger>) {
|
||||
}: TabsTriggerProps) {
|
||||
return (
|
||||
<TabsPrimitive.Trigger
|
||||
data-slot="tabs-trigger"
|
||||
className={cn(
|
||||
"data-[state=active]:bg-background dark:data-[state=active]:text-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:outline-ring dark:data-[state=active]:border-zinc-500 dark:data-[state=active]:bg-input/30 text-foreground dark:text-muted-foreground inline-flex h-[calc(100%-1px)] flex-1 items-center justify-center gap-1.5 rounded-md border border-transparent px-2 py-1 text-sm font-medium whitespace-nowrap cursor-pointer transition-[color,box-shadow] focus-visible:ring-[1px] focus-visible:outline-1 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:shadow-sm [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
|
||||
"inline-flex items-center justify-center gap-1.5 text-sm font-medium whitespace-nowrap cursor-pointer transition-all focus-visible:outline-none disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4",
|
||||
variant === "default" && "data-[state=active]:bg-background dark:data-[state=active]:text-foreground focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:outline-ring dark:data-[state=active]:border-zinc-500 dark:data-[state=active]:bg-input/30 text-foreground dark:text-muted-foreground h-[calc(100%-1px)] flex-1 rounded-md border border-transparent px-2 py-1 focus-visible:ring-[1px] focus-visible:outline-1 data-[state=active]:shadow-sm",
|
||||
variant === "underline" && "text-muted-foreground data-[state=active]:text-foreground h-10 px-1 pb-3 -mb-px border-b-2 border-transparent data-[state=active]:border-primary rounded-none bg-transparent",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
|
||||
29
frontend/hooks/use-api-key-settings.ts
Normal file
@@ -0,0 +1,29 @@
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { ApiKeySettingsService } from '@/services/api-key-settings.service'
import type { ApiKeySettings } from '@/types/api-key-settings.types'
import { useToastMessages } from '@/lib/toast-helpers'
import { getErrorCode } from '@/lib/response-parser'

export function useApiKeySettings() {
  return useQuery({
    queryKey: ['api-key-settings'],
    queryFn: () => ApiKeySettingsService.getSettings(),
  })
}

export function useUpdateApiKeySettings() {
  const qc = useQueryClient()
  const toastMessages = useToastMessages()

  return useMutation({
    mutationFn: (data: Partial<ApiKeySettings>) =>
      ApiKeySettingsService.updateSettings(data),
    onSuccess: () => {
      qc.invalidateQueries({ queryKey: ['api-key-settings'] })
      toastMessages.success('toast.apiKeys.settings.success')
    },
    onError: (error: any) => {
      toastMessages.errorFromCode(getErrorCode(error?.response?.data), 'toast.apiKeys.settings.error')
    },
  })
}
40
frontend/hooks/use-global-blacklist.ts
Normal file
@@ -0,0 +1,40 @@
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'
import { toast } from 'sonner'
import { useTranslations } from 'next-intl'
import {
  getGlobalBlacklist,
  updateGlobalBlacklist,
  type GlobalBlacklistResponse,
  type UpdateGlobalBlacklistRequest,
} from '@/services/global-blacklist.service'

const QUERY_KEY = ['global-blacklist']

/**
 * Hook to fetch global blacklist
 */
export function useGlobalBlacklist() {
  return useQuery<GlobalBlacklistResponse>({
    queryKey: QUERY_KEY,
    queryFn: getGlobalBlacklist,
  })
}

/**
 * Hook to update global blacklist
 */
export function useUpdateGlobalBlacklist() {
  const queryClient = useQueryClient()
  const t = useTranslations('pages.settings.blacklist')

  return useMutation({
    mutationFn: (data: UpdateGlobalBlacklistRequest) => updateGlobalBlacklist(data),
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: QUERY_KEY })
      toast.success(t('toast.saveSuccess'))
    },
    onError: () => {
      toast.error(t('toast.saveError'))
    },
  })
}
@@ -29,6 +29,17 @@ export function useRunningScans(page = 1, pageSize = 10) {
  return useScans({ page, pageSize, status: 'running' })
}

/**
 * Fetch a target's scan history
 */
export function useTargetScans(targetId: number, pageSize = 5) {
  return useQuery({
    queryKey: ['scans', 'target', targetId, pageSize],
    queryFn: () => getScans({ target: targetId, pageSize }),
    enabled: !!targetId,
  })
}

export function useScan(id: number) {
  return useQuery({
    queryKey: ['scan', id],
@@ -14,7 +14,13 @@ import type { CreateScheduledScanRequest, UpdateScheduledScanRequest } from '@/t
/**
 * Fetch the scheduled scan list
 */
export function useScheduledScans(params: { page?: number; pageSize?: number; search?: string } = { page: 1, pageSize: 10 }) {
export function useScheduledScans(params: {
  page?: number
  pageSize?: number
  search?: string
  targetId?: number
  organizationId?: number
} = { page: 1, pageSize: 10 }) {
  return useQuery({
    queryKey: ['scheduled-scans', params],
    queryFn: () => getScheduledScans(params),
@@ -16,6 +16,8 @@ import {
  linkTargetOrganizations,
  unlinkTargetOrganizations,
  getTargetEndpoints,
  getTargetBlacklist,
  updateTargetBlacklist,
} from '@/services/target.service'
import type {
  CreateTargetRequest,
@@ -304,3 +306,34 @@ export function useTargetEndpoints(
  })
}

/**
 * Fetch a target's blacklist rules
 */
export function useTargetBlacklist(targetId: number) {
  return useQuery({
    queryKey: ['targets', targetId, 'blacklist'],
    queryFn: () => getTargetBlacklist(targetId),
    enabled: !!targetId,
  })
}

/**
 * Update a target's blacklist rules
 */
export function useUpdateTargetBlacklist() {
  const queryClient = useQueryClient()
  const toastMessages = useToastMessages()

  return useMutation({
    mutationFn: ({ targetId, patterns }: { targetId: number; patterns: string[] }) =>
      updateTargetBlacklist(targetId, patterns),
    onSuccess: (_, variables) => {
      queryClient.invalidateQueries({ queryKey: ['targets', variables.targetId, 'blacklist'] })
      toastMessages.success('toast.blacklist.save.success')
    },
    onError: (error: any) => {
      toastMessages.errorFromCode(getErrorCode(error?.response?.data), 'toast.blacklist.save.error')
    },
  })
}
@@ -319,6 +319,8 @@
|
||||
"workers": "Workers",
|
||||
"systemLogs": "System Logs",
|
||||
"notifications": "Notifications",
|
||||
"apiKeys": "API Keys",
|
||||
"globalBlacklist": "Global Blacklist",
|
||||
"help": "Get Help",
|
||||
"feedback": "Feedback"
|
||||
},
|
||||
@@ -836,7 +838,9 @@
|
||||
"noConfig": "No config",
|
||||
"capabilitiesCount": "{count} capabilities",
|
||||
"selected": "Selected",
|
||||
"selectedEngines": "{count} engines selected"
|
||||
"selectedEngines": "{count} engines selected",
|
||||
"scanTarget": "Scan Target",
|
||||
"presetTargetHint": "Target is preset and cannot be changed. To scan other targets, create from the global scheduled scans page."
|
||||
},
|
||||
"presets": {
|
||||
"everyHour": "Every Hour",
|
||||
@@ -1690,6 +1694,12 @@
|
||||
"error": "Notification connection error: {message}"
|
||||
}
|
||||
},
|
||||
"apiKeys": {
|
||||
"settings": {
|
||||
"success": "API key settings saved",
|
||||
"error": "Failed to save API key settings"
|
||||
}
|
||||
},
|
||||
"tool": {
|
||||
"create": {
|
||||
"success": "Tool created successfully",
|
||||
@@ -1759,6 +1769,12 @@
|
||||
"error": "Failed to fetch system logs, please check backend",
|
||||
"recovered": "System log connection recovered"
|
||||
}
|
||||
},
|
||||
"blacklist": {
|
||||
"save": {
|
||||
"success": "Blacklist rules saved",
|
||||
"error": "Failed to save blacklist rules"
|
||||
}
|
||||
}
|
||||
},
|
||||
"quickScan": {
|
||||
@@ -2005,6 +2021,9 @@
|
||||
},
|
||||
"targetDetail": {
|
||||
"noDescription": "No description",
|
||||
"breadcrumb": {
|
||||
"targetDetail": "Target Detail"
|
||||
},
|
||||
"error": {
|
||||
"title": "Load Failed",
|
||||
"message": "An error occurred while fetching target data"
|
||||
@@ -2012,11 +2031,138 @@
|
||||
"notFound": {
|
||||
"title": "Target Not Found",
|
||||
"message": "Target with ID {id} not found"
|
||||
},
|
||||
"tabs": {
|
||||
"overview": "Overview",
|
||||
"assets": "Assets",
|
||||
"vulnerabilities": "Vulnerabilities",
|
||||
"settings": "Settings"
|
||||
},
|
||||
"settings": {
|
||||
"loadError": "Failed to load settings",
|
||||
"blacklist": {
|
||||
"title": "Blacklist Rules",
|
||||
"description": "Assets matching the following rules will be automatically excluded during scanning.",
|
||||
"rulesTitle": "Supported Rule Types",
|
||||
"rules": {
|
||||
"domain": "Domain wildcard, matches specified suffix",
|
||||
"domainShort": "Domain",
|
||||
"keyword": "Keyword match, contains specified string",
|
||||
"keywordShort": "Keyword",
|
||||
"ip": "Exact IP address match",
|
||||
"ipShort": "IP",
|
||||
"cidr": "Matches IP range",
|
||||
"cidrShort": "CIDR"
|
||||
},
|
||||
"placeholder": "Enter rules, one per line\n\nExamples:\n*.gov\n*.edu\n*cdn*\n192.168.0.0/16\n10.0.0.1",
|
||||
"save": "Save Rules"
|
||||
},
|
||||
"scheduledScans": {
|
||||
"title": "Scheduled Scans",
|
||||
"description": "Configure automated scan tasks for this target",
|
||||
"create": "New Scheduled Scan",
|
||||
"empty": "No scheduled scans",
|
||||
"emptyHint": "Click the button above to create a scheduled scan",
|
||||
"enabled": "Enabled",
|
||||
"disabled": "Disabled",
|
||||
"nextRun": "Next run",
|
||||
"runCount": "Run count",
|
||||
"edit": "Edit",
|
||||
"delete": "Delete",
|
||||
"cronDaily": "Daily at {time}",
|
||||
"cronWeekly": "Every {day} at {time}",
|
||||
"cronMonthly": "Monthly on day {day} at {time}",
|
||||
"weekdays": {
|
||||
"sun": "Sunday",
|
||||
"mon": "Monday",
|
||||
"tue": "Tuesday",
|
||||
"wed": "Wednesday",
|
||||
"thu": "Thursday",
|
||||
"fri": "Friday",
|
||||
"sat": "Saturday"
|
||||
},
|
||||
"deleteConfirm": {
|
||||
"title": "Confirm Delete",
|
||||
"description": "Are you sure you want to delete the scheduled scan \"{name}\"? This action cannot be undone.",
|
||||
"cancel": "Cancel",
|
||||
"confirm": "Delete"
|
||||
}
|
||||
}
|
||||
},
|
||||
"overview": {
|
||||
"loadError": "Failed to load target data",
|
||||
"createdAt": "Created",
|
||||
"lastScanned": "Last Scanned",
|
||||
"assetsTitle": "Assets",
|
||||
"vulnerabilitiesTitle": "Vulnerabilities",
|
||||
"scanHistoryTitle": "Scan History",
|
||||
"recentScans": "Recent Scans",
|
||||
"noScans": "No scan records",
|
||||
"viewAll": "View all",
|
||||
"cards": {
|
||||
"websites": "Websites",
|
||||
"subdomains": "Subdomains",
|
||||
"ips": "IP Addresses",
|
||||
"urls": "URLs",
|
||||
"directories": "Directories",
|
||||
"vulnerabilities": "Total Vulnerabilities"
|
||||
},
|
||||
"severity": {
|
||||
"critical": "Critical",
|
||||
"high": "High",
|
||||
"medium": "Medium",
|
||||
"low": "Low"
|
||||
},
|
||||
"scanStatus": {
|
||||
"completed": "Completed",
|
||||
"running": "Running",
|
||||
"failed": "Failed",
|
||||
"cancelled": "Cancelled",
|
||||
"initiated": "Pending"
|
||||
},
|
||||
"scheduledScans": {
|
||||
"title": "Scheduled Scans",
|
||||
"manage": "Manage",
|
||||
"empty": "No scheduled scans",
|
||||
"createFirst": "Create your first scheduled scan",
|
||||
"configured": "Configured",
|
||||
"enabled": "Enabled",
|
||||
"nextRun": "Next run",
|
||||
"today": "Today",
|
||||
"tomorrow": "Tomorrow",
|
||||
"more": "+{count} more"
|
||||
},
|
||||
"initiateScan": "Initiate Scan"
|
||||
}
|
||||
},
|
||||
"nav": {
|
||||
"scanEngine": "Scan Engine",
|
||||
"wordlists": "Wordlist Management"
|
||||
},
|
||||
"settings": {
|
||||
"blacklist": {
|
||||
"title": "Global Blacklist",
|
||||
"description": "Configure global blacklist rules. Matching assets will be automatically excluded during scans.",
|
||||
"loadError": "Failed to load blacklist rules",
|
||||
"card": {
|
||||
"title": "Blacklist Rules",
|
||||
"description": "These rules apply to all target scans. To configure blacklist for a specific target, go to the target settings page."
|
||||
},
|
||||
"rules": {
|
||||
"title": "Supported rule types",
|
||||
"domain": "Domain",
|
||||
"keyword": "Keyword",
|
||||
"ip": "IP",
|
||||
"cidr": "CIDR"
|
||||
},
|
||||
"scopeHint": "Global rules apply to all targets. Target-level rules can be configured in Target → Settings.",
|
||||
"placeholder": "Enter rules, one per line\n\nExamples:\n*.gov\n*.edu\n*cdn*\n192.168.0.0/16\n10.0.0.1",
|
||||
"save": "Save Rules",
|
||||
"toast": {
|
||||
"saveSuccess": "Blacklist rules saved",
|
||||
"saveError": "Failed to save blacklist rules"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"metadata": {
|
||||
|
||||
@@ -319,6 +319,8 @@
|
||||
"workers": "扫描节点",
|
||||
"systemLogs": "系统日志",
|
||||
"notifications": "通知设置",
|
||||
"apiKeys": "API 密钥",
|
||||
"globalBlacklist": "全局黑名单",
|
||||
"help": "获取帮助",
|
||||
"feedback": "反馈建议"
|
||||
},
|
||||
@@ -836,7 +838,9 @@
|
||||
"noConfig": "无配置",
|
||||
"capabilitiesCount": "{count} 项能力",
|
||||
"selected": "已选择",
|
||||
"selectedEngines": "已选择 {count} 个引擎"
|
||||
"selectedEngines": "已选择 {count} 个引擎",
|
||||
"scanTarget": "扫描目标",
|
||||
"presetTargetHint": "目标已预设,无法更改。如需扫描其他目标,请从全局定时扫描页面创建。"
|
||||
},
|
||||
"presets": {
|
||||
"everyHour": "每小时",
|
||||
@@ -1690,6 +1694,12 @@
|
||||
"error": "通知连接错误: {message}"
|
||||
}
|
||||
},
|
||||
"apiKeys": {
|
||||
"settings": {
|
||||
"success": "API 密钥配置已保存",
|
||||
"error": "保存 API 密钥配置失败"
|
||||
}
|
||||
},
|
||||
"tool": {
|
||||
"create": {
|
||||
"success": "工具创建成功",
|
||||
@@ -1759,6 +1769,12 @@
|
||||
"error": "系统日志获取失败,请检查后端接口",
|
||||
"recovered": "系统日志连接已恢复"
|
||||
}
|
||||
},
|
||||
"blacklist": {
|
||||
"save": {
|
||||
"success": "黑名单规则已保存",
|
||||
"error": "保存黑名单规则失败"
|
||||
}
|
||||
}
|
||||
},
|
||||
"quickScan": {
|
||||
@@ -2005,6 +2021,9 @@
|
||||
},
|
||||
"targetDetail": {
|
||||
"noDescription": "暂无描述",
|
||||
"breadcrumb": {
|
||||
"targetDetail": "目标详情"
|
||||
},
|
||||
"error": {
|
||||
"title": "加载失败",
|
||||
"message": "获取目标数据时出现错误"
|
||||
@@ -2012,11 +2031,138 @@
|
||||
"notFound": {
|
||||
"title": "目标不存在",
|
||||
"message": "未找到ID为 {id} 的目标"
|
||||
},
|
||||
"tabs": {
|
||||
"overview": "概览",
|
||||
"assets": "资产",
|
||||
"vulnerabilities": "漏洞",
|
||||
"settings": "设置"
|
||||
},
|
||||
"settings": {
|
||||
"loadError": "加载设置失败",
|
||||
"blacklist": {
|
||||
"title": "黑名单规则",
|
||||
"description": "扫描时将自动排除匹配以下规则的资产。",
|
||||
"rulesTitle": "支持的规则类型",
|
||||
"rules": {
|
||||
"domain": "域名通配符,匹配指定后缀",
|
||||
"domainShort": "域名",
|
||||
"keyword": "关键词匹配,包含指定字符串",
|
||||
"keywordShort": "关键词",
|
||||
"ip": "精确匹配 IP 地址",
|
||||
"ipShort": "IP",
|
||||
"cidr": "匹配 IP 网段范围",
|
||||
"cidrShort": "CIDR"
|
||||
},
|
||||
"placeholder": "输入规则,每行一个\n\n示例:\n*.gov\n*.edu\n*cdn*\n192.168.0.0/16\n10.0.0.1",
|
||||
"save": "保存规则"
|
||||
},
|
||||
"scheduledScans": {
|
||||
"title": "定时扫描",
|
||||
"description": "为该目标配置自动执行的扫描任务",
|
||||
"create": "新建定时扫描",
|
||||
"empty": "暂无定时扫描任务",
|
||||
"emptyHint": "点击上方按钮创建定时扫描任务",
|
||||
"enabled": "已启用",
|
||||
"disabled": "已禁用",
|
||||
"nextRun": "下次执行",
|
||||
"runCount": "执行次数",
|
||||
"edit": "编辑",
|
||||
"delete": "删除",
|
||||
"cronDaily": "每天 {time}",
|
||||
"cronWeekly": "每周{day} {time}",
|
||||
"cronMonthly": "每月{day}日 {time}",
|
||||
"weekdays": {
|
||||
"sun": "日",
|
||||
"mon": "一",
|
||||
"tue": "二",
|
||||
"wed": "三",
|
||||
"thu": "四",
|
||||
"fri": "五",
|
||||
"sat": "六"
|
||||
},
|
||||
"deleteConfirm": {
|
||||
"title": "确认删除",
|
||||
"description": "确定要删除定时扫描任务「{name}」吗?此操作无法撤销。",
|
||||
"cancel": "取消",
|
||||
"confirm": "删除"
|
||||
}
|
||||
}
|
||||
},
|
||||
"overview": {
|
||||
"loadError": "加载目标数据失败",
|
||||
"createdAt": "创建时间",
|
||||
"lastScanned": "最后扫描",
|
||||
"assetsTitle": "资产统计",
|
||||
"vulnerabilitiesTitle": "漏洞统计",
|
||||
"scanHistoryTitle": "扫描历史",
|
||||
"recentScans": "最近扫描",
|
||||
"noScans": "暂无扫描记录",
|
||||
"viewAll": "查看全部",
|
||||
"cards": {
|
||||
"websites": "网站",
|
||||
"subdomains": "子域名",
|
||||
"ips": "IP 地址",
|
||||
"urls": "URL",
|
||||
"directories": "目录",
|
||||
"vulnerabilities": "漏洞总数"
|
||||
},
|
||||
"severity": {
|
||||
"critical": "严重",
|
||||
"high": "高危",
|
||||
"medium": "中危",
|
||||
"low": "低危"
|
||||
},
|
||||
"scanStatus": {
|
||||
"completed": "已完成",
|
||||
"running": "运行中",
|
||||
"failed": "失败",
|
||||
"cancelled": "已取消",
|
||||
"initiated": "等待中"
|
||||
},
|
||||
"scheduledScans": {
|
||||
"title": "定时扫描",
|
||||
"manage": "管理",
|
||||
"empty": "暂无定时扫描任务",
|
||||
"createFirst": "创建第一个定时扫描",
|
||||
"configured": "已配置",
|
||||
"enabled": "已启用",
|
||||
"nextRun": "下次执行",
|
||||
"today": "今天",
|
||||
"tomorrow": "明天",
|
||||
"more": "+{count} 更多"
|
||||
},
|
||||
"initiateScan": "发起扫描"
|
||||
}
|
||||
},
|
||||
"nav": {
|
||||
"scanEngine": "扫描引擎",
|
||||
"wordlists": "字典管理"
|
||||
},
|
||||
"settings": {
|
||||
"blacklist": {
|
||||
"title": "全局黑名单",
|
||||
"description": "配置全局黑名单规则,扫描时将自动排除匹配的资产",
|
||||
"loadError": "加载黑名单规则失败",
|
||||
"card": {
|
||||
"title": "黑名单规则",
|
||||
"description": "这些规则将应用于所有目标的扫描任务。如需为特定目标配置黑名单,请前往目标设置页面。"
|
||||
},
|
||||
"rules": {
|
||||
"title": "支持的规则类型",
|
||||
"domain": "域名",
|
||||
"keyword": "关键词",
|
||||
"ip": "IP",
|
||||
"cidr": "CIDR"
|
||||
},
|
||||
"scopeHint": "全局规则对所有目标生效。目标级规则可在「目标 → 设置」中单独配置。",
|
||||
"placeholder": "输入规则,每行一个\n\n示例:\n*.gov\n*.edu\n*cdn*\n192.168.0.0/16\n10.0.0.1",
|
||||
"save": "保存规则",
|
||||
"toast": {
|
||||
"saveSuccess": "黑名单规则已保存",
|
||||
"saveError": "保存黑名单规则失败"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"metadata": {
|
||||
|
||||
187
frontend/mock/data/directories.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import type { Directory, DirectoryListResponse } from '@/types/directory.types'
|
||||
|
||||
export const mockDirectories: Directory[] = [
|
||||
{
|
||||
id: 1,
|
||||
url: 'https://acme.com/admin',
|
||||
status: 200,
|
||||
contentLength: 12345,
|
||||
words: 1234,
|
||||
lines: 89,
|
||||
contentType: 'text/html',
|
||||
duration: 0.234,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
url: 'https://acme.com/api',
|
||||
status: 301,
|
||||
contentLength: 0,
|
||||
words: 0,
|
||||
lines: 0,
|
||||
contentType: 'text/html',
|
||||
duration: 0.056,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
url: 'https://acme.com/login',
|
||||
status: 200,
|
||||
contentLength: 8765,
|
||||
words: 567,
|
||||
lines: 45,
|
||||
contentType: 'text/html',
|
||||
duration: 0.189,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
url: 'https://acme.com/dashboard',
|
||||
status: 302,
|
||||
contentLength: 0,
|
||||
words: 0,
|
||||
lines: 0,
|
||||
contentType: 'text/html',
|
||||
duration: 0.078,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
url: 'https://acme.com/static/js/app.js',
|
||||
status: 200,
|
||||
contentLength: 456789,
|
||||
words: 12345,
|
||||
lines: 5678,
|
||||
contentType: 'application/javascript',
|
||||
duration: 0.345,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:04:00Z',
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
url: 'https://acme.com/.git/config',
|
||||
status: 200,
|
||||
contentLength: 234,
|
||||
words: 45,
|
||||
lines: 12,
|
||||
contentType: 'text/plain',
|
||||
duration: 0.023,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:05:00Z',
|
||||
},
|
||||
{
|
||||
id: 7,
|
||||
url: 'https://acme.com/backup.zip',
|
||||
status: 200,
|
||||
contentLength: 12345678,
|
||||
words: null,
|
||||
lines: null,
|
||||
contentType: 'application/zip',
|
||||
duration: 1.234,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:06:00Z',
|
||||
},
|
||||
{
|
||||
id: 8,
|
||||
url: 'https://acme.com/robots.txt',
|
||||
status: 200,
|
||||
contentLength: 567,
|
||||
words: 89,
|
||||
lines: 23,
|
||||
contentType: 'text/plain',
|
||||
duration: 0.034,
|
||||
websiteUrl: 'https://acme.com',
|
||||
createdAt: '2024-12-28T10:07:00Z',
|
||||
},
|
||||
{
|
||||
id: 9,
|
||||
url: 'https://api.acme.com/v1/health',
|
||||
status: 200,
|
||||
contentLength: 45,
|
||||
words: 5,
|
||||
lines: 1,
|
||||
contentType: 'application/json',
|
||||
duration: 0.012,
|
||||
websiteUrl: 'https://api.acme.com',
|
||||
createdAt: '2024-12-28T10:08:00Z',
|
||||
},
|
||||
{
|
||||
id: 10,
|
||||
url: 'https://api.acme.com/swagger-ui.html',
|
||||
status: 200,
|
||||
contentLength: 23456,
|
||||
words: 1234,
|
||||
lines: 234,
|
||||
contentType: 'text/html',
|
||||
duration: 0.267,
|
||||
websiteUrl: 'https://api.acme.com',
|
||||
createdAt: '2024-12-28T10:09:00Z',
|
||||
},
|
||||
{
|
||||
id: 11,
|
||||
url: 'https://techstart.io/wp-admin',
|
||||
status: 302,
|
||||
contentLength: 0,
|
||||
words: 0,
|
||||
lines: 0,
|
||||
contentType: 'text/html',
|
||||
duration: 0.089,
|
||||
websiteUrl: 'https://techstart.io',
|
||||
createdAt: '2024-12-26T08:45:00Z',
|
||||
},
|
||||
{
|
||||
id: 12,
|
||||
url: 'https://techstart.io/wp-login.php',
|
||||
status: 200,
|
||||
contentLength: 4567,
|
||||
words: 234,
|
||||
lines: 78,
|
||||
contentType: 'text/html',
|
||||
duration: 0.156,
|
||||
websiteUrl: 'https://techstart.io',
|
||||
createdAt: '2024-12-26T08:46:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
export function getMockDirectories(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
targetId?: number
|
||||
scanId?: number
|
||||
}): DirectoryListResponse {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockDirectories
|
||||
|
||||
if (filter) {
|
||||
filtered = filtered.filter(
|
||||
d =>
|
||||
d.url.toLowerCase().includes(filter) ||
|
||||
d.contentType.toLowerCase().includes(filter)
|
||||
)
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return {
|
||||
results,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockDirectoryById(id: number): Directory | undefined {
|
||||
return mockDirectories.find(d => d.id === id)
|
||||
}
|
||||
593
frontend/mock/data/fingerprints.ts
Normal file
@@ -0,0 +1,593 @@
|
||||
import type {
|
||||
EholeFingerprint,
|
||||
GobyFingerprint,
|
||||
WappalyzerFingerprint,
|
||||
FingersFingerprint,
|
||||
FingerPrintHubFingerprint,
|
||||
ARLFingerprint,
|
||||
FingerprintStats,
|
||||
} from '@/types/fingerprint.types'
|
||||
import type { PaginatedResponse } from '@/types/api-response.types'
|
||||
|
||||
// ==================== EHole fingerprint data (real-world samples) ====================
|
||||
export const mockEholeFingerprints: EholeFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
cms: '致远OA',
|
||||
method: 'keyword',
|
||||
location: 'body',
|
||||
keyword: ['/seeyon/USER-DATA/IMAGES/LOGIN/login.gif'],
|
||||
isImportant: true,
|
||||
type: 'oa',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
cms: '通达OA',
|
||||
method: 'keyword',
|
||||
location: 'body',
|
||||
keyword: ['/static/images/tongda.ico'],
|
||||
isImportant: true,
|
||||
type: 'oa',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
cms: 'Nexus Repository Manager',
|
||||
method: 'keyword',
|
||||
location: 'title',
|
||||
keyword: ['Nexus Repository Manager'],
|
||||
isImportant: true,
|
||||
type: 'cloud',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
cms: '禅道 zentao',
|
||||
method: 'keyword',
|
||||
location: 'title',
|
||||
keyword: ['Welcome to use zentao'],
|
||||
isImportant: true,
|
||||
type: 'oa',
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
cms: 'Kibana',
|
||||
method: 'keyword',
|
||||
location: 'title',
|
||||
keyword: ['Kibana'],
|
||||
isImportant: true,
|
||||
type: 'cloud',
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
cms: 'Spring env',
|
||||
method: 'keyword',
|
||||
location: 'body',
|
||||
keyword: ['Whitelabel Error Page'],
|
||||
isImportant: true,
|
||||
type: 'framework',
|
||||
createdAt: '2024-12-20T10:05:00Z',
|
||||
},
|
||||
{
|
||||
id: 7,
|
||||
cms: '泛微OA',
|
||||
method: 'keyword',
|
||||
location: 'header',
|
||||
keyword: ['ecology_JSessionid'],
|
||||
isImportant: true,
|
||||
type: 'oa',
|
||||
createdAt: '2024-12-20T10:06:00Z',
|
||||
},
|
||||
{
|
||||
id: 8,
|
||||
cms: '用友NC',
|
||||
method: 'keyword',
|
||||
location: 'body',
|
||||
keyword: ['UFIDA', '/nc/servlet/nc.ui.iufo.login.Index'],
|
||||
isImportant: true,
|
||||
type: 'oa',
|
||||
createdAt: '2024-12-20T10:07:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== Goby fingerprint data (real-world samples) ====================
|
||||
export const mockGobyFingerprints: GobyFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: 'WebSphere-App-Server',
|
||||
logic: '((a||b) &&c&&d) || (e&&f&&g)',
|
||||
rule: [
|
||||
{ label: 'a', feature: 'Server: WebSphere Application Server', is_equal: true },
|
||||
{ label: 'b', feature: 'IBM WebSphere Application Server', is_equal: true },
|
||||
{ label: 'c', feature: 'couchdb', is_equal: false },
|
||||
{ label: 'd', feature: 'drupal', is_equal: false },
|
||||
{ label: 'e', feature: 'Server: WebSphere Application Server', is_equal: true },
|
||||
{ label: 'f', feature: 'couchdb', is_equal: false },
|
||||
{ label: 'g', feature: 'drupal', is_equal: false },
|
||||
],
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'Wing-FTP-Server',
|
||||
logic: 'a||b||c||d',
|
||||
rule: [
|
||||
{ label: 'a', feature: 'Server: Wing FTP Server', is_equal: true },
|
||||
{ label: 'b', feature: 'Server: Wing FTP Server', is_equal: true },
|
||||
{ label: 'c', feature: '/help_javascript.htm', is_equal: true },
|
||||
{ label: 'd', feature: 'Wing FTP Server', is_equal: true },
|
||||
],
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'Fortinet-sslvpn',
|
||||
logic: 'a&&b',
|
||||
rule: [
|
||||
{ label: 'a', feature: 'fgt_lang', is_equal: true },
|
||||
{ label: 'b', feature: '/sslvpn/portal.html', is_equal: true },
|
||||
],
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'D-link-DSL-2640B',
|
||||
logic: 'a||b',
|
||||
rule: [
|
||||
{ label: 'a', feature: 'Product : DSL-2640B', is_equal: true },
|
||||
{ label: 'b', feature: 'D-Link DSL-2640B', is_equal: true },
|
||||
],
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'Kedacom-NVR',
|
||||
logic: 'a|| (b&&c) ||d',
|
||||
rule: [
|
||||
{ label: 'a', feature: 'NVR Station Web', is_equal: true },
|
||||
{ label: 'b', feature: 'location="index_cn.htm";', is_equal: true },
|
||||
{ label: 'c', feature: 'if(syslan == "zh-cn"', is_equal: true },
|
||||
{ label: 'd', feature: 'WMS browse NVR', is_equal: true },
|
||||
],
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== Wappalyzer fingerprint data (real-world samples) ====================
|
||||
export const mockWappalyzerFingerprints: WappalyzerFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: '1C-Bitrix',
|
||||
cats: [1, 6],
|
||||
cookies: { bitrix_sm_guest_id: '', bitrix_sm_last_ip: '', bitrix_sm_sale_uid: '' },
|
||||
headers: { 'set-cookie': 'bitrix_', 'x-powered-cms': 'bitrix site manager' },
|
||||
scriptSrc: ['bitrix(?:\\.info/|/js/main/core)'],
|
||||
js: [],
|
||||
implies: ['PHP'],
|
||||
meta: {},
|
||||
html: [],
|
||||
description: '1C-Bitrix is a system of web project management.',
|
||||
website: 'https://www.1c-bitrix.ru',
|
||||
cpe: '',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'React',
|
||||
cats: [12],
|
||||
cookies: {},
|
||||
headers: {},
|
||||
scriptSrc: ['react(?:-dom)?(?:\\.min)?\\.js'],
|
||||
js: ['React.version'],
|
||||
implies: [],
|
||||
meta: {},
|
||||
html: ['data-reactroot'],
|
||||
description: 'React is a JavaScript library for building user interfaces.',
|
||||
website: 'https://reactjs.org',
|
||||
cpe: 'cpe:/a:facebook:react',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'Vue.js',
|
||||
cats: [12],
|
||||
cookies: {},
|
||||
headers: {},
|
||||
scriptSrc: ['vue(?:\\.min)?\\.js'],
|
||||
js: ['Vue.version'],
|
||||
implies: [],
|
||||
meta: {},
|
||||
html: ['data-v-'],
|
||||
description: 'Vue.js is a progressive JavaScript framework.',
|
||||
website: 'https://vuejs.org',
|
||||
cpe: 'cpe:/a:vuejs:vue',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'nginx',
|
||||
cats: [22],
|
||||
cookies: {},
|
||||
headers: { server: 'nginx(?:/([\\d.]+))?\\;version:\\1' },
|
||||
scriptSrc: [],
|
||||
js: [],
|
||||
implies: [],
|
||||
meta: {},
|
||||
html: [],
|
||||
description: 'nginx is a web server.',
|
||||
website: 'http://nginx.org/en',
|
||||
cpe: 'cpe:/a:nginx:nginx',
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'WordPress',
|
||||
cats: [1, 11],
|
||||
cookies: {},
|
||||
headers: { 'x-pingback': '/xmlrpc\\.php$' },
|
||||
scriptSrc: ['/wp-(?:content|includes)/'],
|
||||
js: [],
|
||||
implies: ['PHP', 'MySQL'],
|
||||
meta: { generator: ['WordPress(?: ([\\d.]+))?\\;version:\\1'] },
|
||||
html: ['<link rel=["\']stylesheet["\'] [^>]+/wp-(?:content|includes)/'],
|
||||
description: 'WordPress is a free and open-source CMS.',
|
||||
website: 'https://wordpress.org',
|
||||
cpe: 'cpe:/a:wordpress:wordpress',
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== Fingers fingerprint data (real-world samples) ====================
|
||||
export const mockFingersFingerprints: FingersFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: 'jenkins',
|
||||
link: '',
|
||||
rule: [
|
||||
{
|
||||
favicon_hash: ['81586312'],
|
||||
body: 'Jenkins',
|
||||
header: 'X-Jenkins',
|
||||
},
|
||||
],
|
||||
tag: ['cloud'],
|
||||
focus: true,
|
||||
defaultPort: [8080],
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'gitlab',
|
||||
link: '',
|
||||
rule: [
|
||||
{
|
||||
favicon_hash: ['516963061', '1278323681'],
|
||||
body: 'GitLab',
|
||||
header: '_gitlab_session',
|
||||
},
|
||||
],
|
||||
tag: ['cloud'],
|
||||
focus: true,
|
||||
defaultPort: [80, 443],
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'nacos',
|
||||
link: '',
|
||||
rule: [
|
||||
{
|
||||
body: '<title>Nacos</title>',
|
||||
send_data: '/nacos/',
|
||||
},
|
||||
],
|
||||
tag: ['cloud'],
|
||||
focus: true,
|
||||
defaultPort: [8848],
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'elasticsearch',
|
||||
link: '',
|
||||
rule: [
|
||||
{
|
||||
body: '"cluster_name" : "elasticsearch"',
|
||||
vuln: 'elasticsearch_unauth',
|
||||
},
|
||||
],
|
||||
tag: ['cloud'],
|
||||
focus: true,
|
||||
defaultPort: [9200],
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'zabbix',
|
||||
link: '',
|
||||
rule: [
|
||||
{
|
||||
favicon_hash: ['892542951'],
|
||||
body: 'images/general/zabbix.ico',
|
||||
header: 'zbx_sessionid',
|
||||
send_data: '/zabbix',
|
||||
},
|
||||
],
|
||||
tag: ['cloud'],
|
||||
focus: true,
|
||||
defaultPort: [80, 443],
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== FingerPrintHub fingerprint data (real-world samples) ====================
|
||||
export const mockFingerPrintHubFingerprints: FingerPrintHubFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
fpId: 'apache-tomcat',
|
||||
name: 'Apache Tomcat',
|
||||
author: 'pdteam',
|
||||
tags: 'tech,apache,tomcat',
|
||||
severity: 'info',
|
||||
metadata: {
|
||||
product: 'tomcat',
|
||||
vendor: 'apache',
|
||||
verified: true,
|
||||
shodan_query: 'http.favicon.hash:"-297069493"',
|
||||
fofa_query: 'app="Apache-Tomcat"',
|
||||
},
|
||||
http: [
|
||||
{
|
||||
method: 'GET',
|
||||
path: '/',
|
||||
matchers: [
|
||||
{ type: 'word', part: 'body', words: ['Apache Tomcat'] },
|
||||
{ type: 'status', status: [200] },
|
||||
],
|
||||
},
|
||||
],
|
||||
sourceFile: 'http/technologies/apache/apache-tomcat.yaml',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
fpId: 'nginx-detect',
|
||||
name: 'Nginx Server',
|
||||
author: 'pdteam',
|
||||
tags: 'tech,nginx',
|
||||
severity: 'info',
|
||||
metadata: {
|
||||
product: 'nginx',
|
||||
vendor: 'nginx',
|
||||
verified: true,
|
||||
},
|
||||
http: [
|
||||
{
|
||||
method: 'GET',
|
||||
path: '/',
|
||||
matchers: [
|
||||
{ type: 'regex', part: 'header', regex: ['[Nn]ginx'] },
|
||||
],
|
||||
extractors: [
|
||||
{ type: 'regex', part: 'header', regex: ['nginx/([\\d.]+)'], group: 1 },
|
||||
],
|
||||
},
|
||||
],
|
||||
sourceFile: 'http/technologies/nginx/nginx-version.yaml',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
fpId: 'spring-boot-detect',
|
||||
name: 'Spring Boot',
|
||||
author: 'pdteam',
|
||||
tags: 'tech,spring,java',
|
||||
severity: 'info',
|
||||
metadata: {
|
||||
product: 'spring-boot',
|
||||
vendor: 'vmware',
|
||||
verified: true,
|
||||
},
|
||||
http: [
|
||||
{
|
||||
method: 'GET',
|
||||
path: '/',
|
||||
matchers: [
|
||||
{ type: 'word', part: 'body', words: ['Whitelabel Error Page'] },
|
||||
],
|
||||
},
|
||||
],
|
||||
sourceFile: 'http/technologies/spring/spring-boot.yaml',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== ARL fingerprint data (real-world samples) ====================
|
||||
export const mockARLFingerprints: ARLFingerprint[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: 'Shiro',
|
||||
rule: 'header="rememberMe="',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'ThinkPHP',
|
||||
rule: 'body="ThinkPHP" || header="ThinkPHP"',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'Fastjson',
|
||||
rule: 'body="fastjson" || body="com.alibaba.fastjson"',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'Weblogic',
|
||||
rule: 'body="WebLogic" || header="WebLogic" || body="bea_wls_internal"',
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'JBoss',
|
||||
rule: 'body="JBoss" || header="JBoss" || body="jboss.css"',
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
name: 'Struts2',
|
||||
rule: 'body=".action" || body="struts"',
|
||||
createdAt: '2024-12-20T10:05:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
// ==================== Statistics ====================
|
||||
export const mockFingerprintStats: FingerprintStats = {
|
||||
ehole: 1892,
|
||||
goby: 4567,
|
||||
wappalyzer: 3456,
|
||||
fingers: 2345,
|
||||
fingerprinthub: 8901,
|
||||
arl: 1234,
|
||||
}
|
||||
|
||||
// ==================== 查询函数 ====================
|
||||
export function getMockEholeFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<EholeFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockEholeFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.cms.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockGobyFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<GobyFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockGobyFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.name.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockWappalyzerFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<WappalyzerFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockWappalyzerFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.name.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockFingersFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<FingersFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockFingersFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.name.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockFingerPrintHubFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<FingerPrintHubFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockFingerPrintHubFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.name.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockARLFingerprints(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
}): PaginatedResponse<ARLFingerprint> {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockARLFingerprints
|
||||
if (filter) {
|
||||
filtered = filtered.filter(f => f.name.toLowerCase().includes(filter))
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return { results, total, page, pageSize, totalPages }
|
||||
}
|
||||
|
||||
export function getMockFingerprintStats(): FingerprintStats {
|
||||
return mockFingerprintStats
|
||||
}
|
||||
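The six `getMock*Fingerprints` functions above all repeat the same pagination logic: default `page`/`pageSize`, an optional lowercase name filter, then a `slice` over the filtered array. A hypothetical helper that captures that shared pattern is sketched below; it is not part of this commit.

```ts
// Hypothetical refactoring sketch - not part of the diff. It mirrors the
// pagination logic repeated in each getMock*Fingerprints function above.
function paginate<T>(items: T[], page = 1, pageSize = 10) {
  const total = items.length
  const totalPages = Math.ceil(total / pageSize)
  const start = (page - 1) * pageSize
  return { results: items.slice(start, start + pageSize), total, page, pageSize, totalPages }
}

// e.g. getMockARLFingerprints could then reduce to:
// const filtered = filter
//   ? mockARLFingerprints.filter(f => f.name.toLowerCase().includes(filter))
//   : mockARLFingerprints
// return paginate(filtered, page, pageSize)
```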
frontend/mock/data/ip-addresses.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
import type { IPAddress, GetIPAddressesResponse } from '@/types/ip-address.types'
|
||||
|
||||
// 使用函数生成IP地址
|
||||
const ip = (a: number, b: number, c: number, d: number) => `${a}.${b}.${c}.${d}`
|
||||
|
||||
export const mockIPAddresses: IPAddress[] = [
|
||||
{
|
||||
ip: ip(192, 0, 2, 1),
|
||||
hosts: ['router.local', 'gateway.lan'],
|
||||
ports: [80, 443, 22, 53],
|
||||
createdAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 10),
|
||||
hosts: ['api.acme.com', 'backend.acme.com'],
|
||||
ports: [80, 443, 8080, 3306],
|
||||
createdAt: '2024-12-28T10:01:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 11),
|
||||
hosts: ['web.acme.com', 'www.acme.com'],
|
||||
ports: [80, 443],
|
||||
createdAt: '2024-12-28T10:02:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(198, 51, 100, 50),
|
||||
hosts: ['db.internal.acme.com'],
|
||||
ports: [3306, 5432, 27017],
|
||||
createdAt: '2024-12-28T10:03:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(203, 0, 113, 50),
|
||||
hosts: ['cdn.acme.com'],
|
||||
ports: [80, 443],
|
||||
createdAt: '2024-12-28T10:04:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(198, 51, 100, 10),
|
||||
hosts: ['mail.acme.com', 'smtp.acme.com'],
|
||||
ports: [25, 465, 587, 993, 995],
|
||||
createdAt: '2024-12-28T10:05:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 100),
|
||||
hosts: ['jenkins.acme.com'],
|
||||
ports: [8080, 50000],
|
||||
createdAt: '2024-12-28T10:06:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 101),
|
||||
hosts: ['gitlab.acme.com'],
|
||||
ports: [80, 443, 22],
|
||||
createdAt: '2024-12-28T10:07:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 102),
|
||||
hosts: ['k8s.acme.com', 'kubernetes.acme.com'],
|
||||
ports: [6443, 10250, 10251, 10252],
|
||||
createdAt: '2024-12-28T10:08:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 103),
|
||||
hosts: ['elastic.acme.com'],
|
||||
ports: [9200, 9300, 5601],
|
||||
createdAt: '2024-12-28T10:09:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 104),
|
||||
hosts: ['redis.acme.com'],
|
||||
ports: [6379],
|
||||
createdAt: '2024-12-28T10:10:00Z',
|
||||
},
|
||||
{
|
||||
ip: ip(192, 0, 2, 105),
|
||||
hosts: ['mq.acme.com', 'rabbitmq.acme.com'],
|
||||
ports: [5672, 15672],
|
||||
createdAt: '2024-12-28T10:11:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
export function getMockIPAddresses(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
filter?: string
|
||||
targetId?: number
|
||||
scanId?: number
|
||||
}): GetIPAddressesResponse {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
const filter = params?.filter?.toLowerCase() || ''
|
||||
|
||||
let filtered = mockIPAddresses
|
||||
|
||||
if (filter) {
|
||||
filtered = filtered.filter(
|
||||
ipAddr =>
|
||||
ipAddr.ip.toLowerCase().includes(filter) ||
|
||||
ipAddr.hosts.some(h => h.toLowerCase().includes(filter))
|
||||
)
|
||||
}
|
||||
|
||||
const total = filtered.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = filtered.slice(start, start + pageSize)
|
||||
|
||||
return {
|
||||
results,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockIPAddressByIP(ipStr: string): IPAddress | undefined {
|
||||
return mockIPAddresses.find(addr => addr.ip === ipStr)
|
||||
}
|
||||
frontend/mock/data/notification-settings.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import type {
  NotificationSettings,
  GetNotificationSettingsResponse,
  UpdateNotificationSettingsResponse,
} from '@/types/notification-settings.types'

export const mockNotificationSettings: NotificationSettings = {
  discord: {
    enabled: true,
    webhookUrl: 'https://discord.com/api/webhooks/1234567890/abcdefghijklmnop',
  },
  categories: {
    scan: true,
    vulnerability: true,
    asset: true,
    system: false,
  },
}

export function getMockNotificationSettings(): GetNotificationSettingsResponse {
  return mockNotificationSettings
}

export function updateMockNotificationSettings(
  settings: NotificationSettings
): UpdateNotificationSettingsResponse {
  // Simulate updating the settings
  Object.assign(mockNotificationSettings, settings)

  return {
    message: 'Notification settings updated successfully',
    discord: mockNotificationSettings.discord,
    categories: mockNotificationSettings.categories,
  }
}
frontend/mock/data/nuclei-templates.ts (new file, 240 lines)
@@ -0,0 +1,240 @@
|
||||
import type {
|
||||
NucleiTemplateTreeNode,
|
||||
NucleiTemplateTreeResponse,
|
||||
NucleiTemplateContent,
|
||||
} from '@/types/nuclei.types'
|
||||
|
||||
export const mockNucleiTemplateTree: NucleiTemplateTreeNode[] = [
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'cves',
|
||||
path: 'cves',
|
||||
children: [
|
||||
{
|
||||
type: 'folder',
|
||||
name: '2024',
|
||||
path: 'cves/2024',
|
||||
children: [
|
||||
{
|
||||
type: 'file',
|
||||
name: 'CVE-2024-1234.yaml',
|
||||
path: 'cves/2024/CVE-2024-1234.yaml',
|
||||
templateId: 'CVE-2024-1234',
|
||||
severity: 'critical',
|
||||
tags: ['cve', 'rce'],
|
||||
},
|
||||
{
|
||||
type: 'file',
|
||||
name: 'CVE-2024-5678.yaml',
|
||||
path: 'cves/2024/CVE-2024-5678.yaml',
|
||||
templateId: 'CVE-2024-5678',
|
||||
severity: 'high',
|
||||
tags: ['cve', 'sqli'],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'folder',
|
||||
name: '2023',
|
||||
path: 'cves/2023',
|
||||
children: [
|
||||
{
|
||||
type: 'file',
|
||||
name: 'CVE-2023-9876.yaml',
|
||||
path: 'cves/2023/CVE-2023-9876.yaml',
|
||||
templateId: 'CVE-2023-9876',
|
||||
severity: 'high',
|
||||
tags: ['cve', 'auth-bypass'],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'vulnerabilities',
|
||||
path: 'vulnerabilities',
|
||||
children: [
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'generic',
|
||||
path: 'vulnerabilities/generic',
|
||||
children: [
|
||||
{
|
||||
type: 'file',
|
||||
name: 'sqli-error-based.yaml',
|
||||
path: 'vulnerabilities/generic/sqli-error-based.yaml',
|
||||
templateId: 'sqli-error-based',
|
||||
severity: 'high',
|
||||
tags: ['sqli', 'generic'],
|
||||
},
|
||||
{
|
||||
type: 'file',
|
||||
name: 'xss-reflected.yaml',
|
||||
path: 'vulnerabilities/generic/xss-reflected.yaml',
|
||||
templateId: 'xss-reflected',
|
||||
severity: 'medium',
|
||||
tags: ['xss', 'generic'],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'technologies',
|
||||
path: 'technologies',
|
||||
children: [
|
||||
{
|
||||
type: 'file',
|
||||
name: 'nginx-version.yaml',
|
||||
path: 'technologies/nginx-version.yaml',
|
||||
templateId: 'nginx-version',
|
||||
severity: 'info',
|
||||
tags: ['tech', 'nginx'],
|
||||
},
|
||||
{
|
||||
type: 'file',
|
||||
name: 'apache-detect.yaml',
|
||||
path: 'technologies/apache-detect.yaml',
|
||||
templateId: 'apache-detect',
|
||||
severity: 'info',
|
||||
tags: ['tech', 'apache'],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'exposures',
|
||||
path: 'exposures',
|
||||
children: [
|
||||
{
|
||||
type: 'folder',
|
||||
name: 'configs',
|
||||
path: 'exposures/configs',
|
||||
children: [
|
||||
{
|
||||
type: 'file',
|
||||
name: 'git-config.yaml',
|
||||
path: 'exposures/configs/git-config.yaml',
|
||||
templateId: 'git-config',
|
||||
severity: 'medium',
|
||||
tags: ['exposure', 'git'],
|
||||
},
|
||||
{
|
||||
type: 'file',
|
||||
name: 'env-file.yaml',
|
||||
path: 'exposures/configs/env-file.yaml',
|
||||
templateId: 'env-file',
|
||||
severity: 'high',
|
||||
tags: ['exposure', 'env'],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
export const mockNucleiTemplateContent: Record<string, NucleiTemplateContent> = {
|
||||
'cves/2024/CVE-2024-1234.yaml': {
|
||||
path: 'cves/2024/CVE-2024-1234.yaml',
|
||||
name: 'CVE-2024-1234.yaml',
|
||||
templateId: 'CVE-2024-1234',
|
||||
severity: 'critical',
|
||||
tags: ['cve', 'rce'],
|
||||
content: `id: CVE-2024-1234
|
||||
|
||||
info:
|
||||
name: Example RCE Vulnerability
|
||||
author: pdteam
|
||||
severity: critical
|
||||
description: |
|
||||
Example remote code execution vulnerability.
|
||||
reference:
|
||||
- https://example.com/cve-2024-1234
|
||||
tags: cve,cve2024,rce
|
||||
|
||||
http:
|
||||
- method: POST
|
||||
path:
|
||||
- "{{BaseURL}}/api/execute"
|
||||
headers:
|
||||
Content-Type: application/json
|
||||
body: '{"cmd": "id"}'
|
||||
matchers:
|
||||
- type: word
|
||||
words:
|
||||
- "uid="
|
||||
- "gid="
|
||||
condition: and
|
||||
`,
|
||||
},
|
||||
'vulnerabilities/generic/sqli-error-based.yaml': {
|
||||
path: 'vulnerabilities/generic/sqli-error-based.yaml',
|
||||
name: 'sqli-error-based.yaml',
|
||||
templateId: 'sqli-error-based',
|
||||
severity: 'high',
|
||||
tags: ['sqli', 'generic'],
|
||||
content: `id: sqli-error-based
|
||||
|
||||
info:
|
||||
name: Error Based SQL Injection
|
||||
author: pdteam
|
||||
severity: high
|
||||
tags: sqli,generic
|
||||
|
||||
http:
|
||||
- method: GET
|
||||
path:
|
||||
- "{{BaseURL}}/?id=1'"
|
||||
matchers:
|
||||
- type: word
|
||||
words:
|
||||
- "SQL syntax"
|
||||
- "mysql_fetch"
|
||||
- "You have an error"
|
||||
condition: or
|
||||
`,
|
||||
},
|
||||
'technologies/nginx-version.yaml': {
|
||||
path: 'technologies/nginx-version.yaml',
|
||||
name: 'nginx-version.yaml',
|
||||
templateId: 'nginx-version',
|
||||
severity: 'info',
|
||||
tags: ['tech', 'nginx'],
|
||||
content: `id: nginx-version
|
||||
|
||||
info:
|
||||
name: Nginx Version Detection
|
||||
author: pdteam
|
||||
severity: info
|
||||
tags: tech,nginx
|
||||
|
||||
http:
|
||||
- method: GET
|
||||
path:
|
||||
- "{{BaseURL}}/"
|
||||
matchers:
|
||||
- type: regex
|
||||
part: header
|
||||
regex:
|
||||
- "nginx/([\\d.]+)"
|
||||
extractors:
|
||||
- type: regex
|
||||
part: header
|
||||
group: 1
|
||||
regex:
|
||||
- "nginx/([\\d.]+)"
|
||||
`,
|
||||
},
|
||||
}
|
||||
|
||||
export function getMockNucleiTemplateTree(): NucleiTemplateTreeResponse {
|
||||
return {
|
||||
roots: mockNucleiTemplateTree,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockNucleiTemplateContent(path: string): NucleiTemplateContent | undefined {
|
||||
return mockNucleiTemplateContent[path]
|
||||
}
|
||||
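As a quick illustration of the folder/file tree used above (`NucleiTemplateTreeNode` with `children` on folders and `templateId` on files), here is a hedged traversal sketch. The optional fields are assumed from the mock data shown in the diff, not from the real type definition.

```ts
// Illustrative only - field shapes assumed from the mock data above.
function collectTemplateIds(nodes: NucleiTemplateTreeNode[]): string[] {
  return nodes.flatMap(node =>
    node.type === 'file'
      ? (node.templateId ? [node.templateId] : [])
      : collectTemplateIds(node.children ?? [])
  )
}

// collectTemplateIds(mockNucleiTemplateTree)
// -> ['CVE-2024-1234', 'CVE-2024-5678', 'CVE-2023-9876', 'sqli-error-based', ...]
```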
frontend/mock/data/search.ts (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
import type {
|
||||
SearchResponse,
|
||||
WebsiteSearchResult,
|
||||
EndpointSearchResult,
|
||||
AssetType,
|
||||
} from '@/types/search.types'
|
||||
import { mockWebsites } from './websites'
|
||||
import { mockEndpoints } from './endpoints'
|
||||
|
||||
// 将 Website 转换为搜索结果格式
|
||||
function websiteToSearchResult(website: typeof mockWebsites[0]): WebsiteSearchResult {
|
||||
return {
|
||||
id: website.id,
|
||||
url: website.url,
|
||||
host: website.host,
|
||||
title: website.title,
|
||||
technologies: website.tech || [],
|
||||
statusCode: website.statusCode,
|
||||
contentLength: website.contentLength,
|
||||
contentType: website.contentType,
|
||||
webserver: website.webserver,
|
||||
location: website.location,
|
||||
vhost: website.vhost,
|
||||
responseHeaders: {},
|
||||
responseBody: website.responseBody || '',
|
||||
createdAt: website.createdAt,
|
||||
targetId: website.target ?? 1,
|
||||
vulnerabilities: [],
|
||||
}
|
||||
}
|
||||
|
||||
// 将 Endpoint 转换为搜索结果格式
|
||||
function endpointToSearchResult(endpoint: typeof mockEndpoints[0]): EndpointSearchResult {
|
||||
return {
|
||||
id: endpoint.id,
|
||||
url: endpoint.url,
|
||||
host: endpoint.host || '',
|
||||
title: endpoint.title,
|
||||
technologies: endpoint.tech || [],
|
||||
statusCode: endpoint.statusCode,
|
||||
contentLength: endpoint.contentLength,
|
||||
contentType: endpoint.contentType || '',
|
||||
webserver: endpoint.webserver || '',
|
||||
location: endpoint.location || '',
|
||||
vhost: null,
|
||||
responseHeaders: {},
|
||||
responseBody: '',
|
||||
createdAt: endpoint.createdAt ?? null,
|
||||
targetId: 1,
|
||||
matchedGfPatterns: endpoint.gfPatterns || [],
|
||||
}
|
||||
}
|
||||
|
||||
// 解析搜索表达式
|
||||
function parseSearchQuery(query: string): { field: string; operator: string; value: string }[] {
|
||||
const conditions: { field: string; operator: string; value: string }[] = []
|
||||
|
||||
// 简单解析:field="value" 或 field=="value" 或 field!="value"
|
||||
const regex = /(\w+)(==|!=|=)"([^"]+)"/g
|
||||
let match
|
||||
while ((match = regex.exec(query)) !== null) {
|
||||
conditions.push({
|
||||
field: match[1],
|
||||
operator: match[2],
|
||||
value: match[3],
|
||||
})
|
||||
}
|
||||
|
||||
return conditions
|
||||
}
|
||||
|
||||
// 检查记录是否匹配条件
|
||||
function matchesConditions(
|
||||
record: WebsiteSearchResult | EndpointSearchResult,
|
||||
conditions: { field: string; operator: string; value: string }[]
|
||||
): boolean {
|
||||
if (conditions.length === 0) return true
|
||||
|
||||
return conditions.every(cond => {
|
||||
let fieldValue: string | number | null = null
|
||||
|
||||
switch (cond.field) {
|
||||
case 'host':
|
||||
fieldValue = record.host
|
||||
break
|
||||
case 'url':
|
||||
fieldValue = record.url
|
||||
break
|
||||
case 'title':
|
||||
fieldValue = record.title
|
||||
break
|
||||
case 'tech':
|
||||
fieldValue = record.technologies.join(',')
|
||||
break
|
||||
case 'status':
|
||||
fieldValue = String(record.statusCode)
|
||||
break
|
||||
default:
|
||||
return true
|
||||
}
|
||||
|
||||
if (fieldValue === null) return false
|
||||
const strValue = String(fieldValue).toLowerCase()
|
||||
const searchValue = cond.value.toLowerCase()
|
||||
|
||||
switch (cond.operator) {
|
||||
case '=':
|
||||
return strValue.includes(searchValue)
|
||||
case '==':
|
||||
return strValue === searchValue
|
||||
case '!=':
|
||||
return !strValue.includes(searchValue)
|
||||
default:
|
||||
return true
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function getMockSearchResults(params: {
|
||||
q?: string
|
||||
asset_type?: AssetType
|
||||
page?: number
|
||||
pageSize?: number
|
||||
}): SearchResponse {
|
||||
const { q = '', asset_type = 'website', page = 1, pageSize = 10 } = params
|
||||
|
||||
const conditions = parseSearchQuery(q)
|
||||
|
||||
let results: (WebsiteSearchResult | EndpointSearchResult)[]
|
||||
|
||||
if (asset_type === 'website') {
|
||||
results = mockWebsites
|
||||
.map(websiteToSearchResult)
|
||||
.filter(r => matchesConditions(r, conditions))
|
||||
} else {
|
||||
results = mockEndpoints
|
||||
.map(endpointToSearchResult)
|
||||
.filter(r => matchesConditions(r, conditions))
|
||||
}
|
||||
|
||||
const total = results.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const paginatedResults = results.slice(start, start + pageSize)
|
||||
|
||||
return {
|
||||
results: paginatedResults,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
assetType: asset_type,
|
||||
}
|
||||
}
|
||||
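A brief, illustrative look at how the mock search syntax above behaves: `field="value"` means contains, `field=="value"` means exact match, `field!="value"` means exclusion, and all extracted conditions are ANDed. The call shape comes from `getMockSearchResults` in the diff; the concrete values are made up.

```ts
// Illustrative only - example values are made up.
const res = getMockSearchResults({
  q: 'host="acme.com" status=="200"',
  asset_type: 'website',
  page: 1,
  pageSize: 10,
})
// parseSearchQuery yields [{ field: 'host', operator: '=', value: 'acme.com' },
//                          { field: 'status', operator: '==', value: '200' }]
// and matchesConditions keeps only websites whose host contains "acme.com"
// and whose status code is exactly 200.
```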
frontend/mock/data/system-logs.ts (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
import type { SystemLogResponse, LogFilesResponse, LogFile } from '@/types/system-log.types'
|
||||
|
||||
export const mockLogFiles: LogFile[] = [
|
||||
{
|
||||
filename: 'xingrin.log',
|
||||
category: 'system',
|
||||
size: 1234567,
|
||||
modifiedAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
filename: 'xingrin-error.log',
|
||||
category: 'error',
|
||||
size: 45678,
|
||||
modifiedAt: '2024-12-28T09:30:00Z',
|
||||
},
|
||||
{
|
||||
filename: 'worker.log',
|
||||
category: 'system',
|
||||
size: 234567,
|
||||
modifiedAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
filename: 'celery.log',
|
||||
category: 'system',
|
||||
size: 567890,
|
||||
modifiedAt: '2024-12-28T09:45:00Z',
|
||||
},
|
||||
{
|
||||
filename: 'nginx-access.log',
|
||||
category: 'system',
|
||||
size: 12345678,
|
||||
modifiedAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
filename: 'nginx-error.log',
|
||||
category: 'error',
|
||||
size: 23456,
|
||||
modifiedAt: '2024-12-28T08:00:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
export const mockSystemLogContent = `[2024-12-28 10:00:00] INFO: Server started on port 8000
|
||||
[2024-12-28 10:00:01] INFO: Database connection established
|
||||
[2024-12-28 10:00:02] INFO: Redis connection established
|
||||
[2024-12-28 10:00:03] INFO: Worker node registered: local-worker-1
|
||||
[2024-12-28 10:00:05] INFO: Celery worker started with 4 concurrent tasks
|
||||
[2024-12-28 10:01:00] INFO: New scan task created: scan-001
|
||||
[2024-12-28 10:01:01] INFO: Task scan-001 assigned to worker local-worker-1
|
||||
[2024-12-28 10:01:05] INFO: Subdomain enumeration started for target: acme.com
|
||||
[2024-12-28 10:02:30] INFO: Found 45 subdomains for acme.com
|
||||
[2024-12-28 10:02:31] INFO: Port scanning started for 45 hosts
|
||||
[2024-12-28 10:05:00] INFO: Port scanning completed, found 123 open ports
|
||||
[2024-12-28 10:05:01] INFO: HTTP probing started for 123 endpoints
|
||||
[2024-12-28 10:08:00] INFO: HTTP probing completed, found 89 live websites
|
||||
[2024-12-28 10:08:01] INFO: Fingerprint detection started
|
||||
[2024-12-28 10:10:00] INFO: Fingerprint detection completed
|
||||
[2024-12-28 10:10:01] INFO: Vulnerability scanning started with nuclei
|
||||
[2024-12-28 10:15:00] INFO: Vulnerability scanning completed, found 5 vulnerabilities
|
||||
[2024-12-28 10:15:01] INFO: Scan task scan-001 completed successfully
|
||||
[2024-12-28 10:15:02] INFO: Results saved to database
|
||||
[2024-12-28 10:15:03] INFO: Notification sent to Discord webhook`
|
||||
|
||||
export const mockErrorLogContent = `[2024-12-28 08:30:00] ERROR: Connection refused: Redis server not responding
|
||||
[2024-12-28 08:30:01] ERROR: Retrying Redis connection in 5 seconds...
|
||||
[2024-12-28 08:30:06] INFO: Redis connection recovered
|
||||
[2024-12-28 09:15:00] WARNING: High memory usage detected (85%)
|
||||
[2024-12-28 09:15:01] INFO: Running garbage collection
|
||||
[2024-12-28 09:15:05] INFO: Memory usage reduced to 62%
|
||||
[2024-12-28 09:30:00] ERROR: Worker node disconnected: remote-worker-2
|
||||
[2024-12-28 09:30:01] WARNING: Reassigning 3 tasks from remote-worker-2
|
||||
[2024-12-28 09:30:05] INFO: Tasks reassigned successfully`
|
||||
|
||||
export function getMockLogFiles(): LogFilesResponse {
|
||||
return {
|
||||
files: mockLogFiles,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockSystemLogs(params?: {
|
||||
file?: string
|
||||
lines?: number
|
||||
}): SystemLogResponse {
|
||||
const filename = params?.file || 'xingrin.log'
|
||||
const lines = params?.lines || 100
|
||||
|
||||
let content: string
|
||||
if (filename.includes('error')) {
|
||||
content = mockErrorLogContent
|
||||
} else {
|
||||
content = mockSystemLogContent
|
||||
}
|
||||
|
||||
// 模拟行数限制
|
||||
const contentLines = content.split('\n')
|
||||
const limitedContent = contentLines.slice(-lines).join('\n')
|
||||
|
||||
return {
|
||||
content: limitedContent,
|
||||
}
|
||||
}
|
||||
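For context, the mock log reader above selects the error log when the filename contains `error` and returns only the last `lines` entries. A small illustrative call (parameter values made up):

```ts
// Illustrative only.
const { content } = getMockSystemLogs({ file: 'xingrin-error.log', lines: 5 })
// content now holds the last 5 lines of mockErrorLogContent.
```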
frontend/mock/data/tools.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
import type { Tool, GetToolsResponse } from '@/types/tool.types'
|
||||
|
||||
export const mockTools: Tool[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: 'subfinder',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/projectdiscovery/subfinder',
|
||||
version: 'v2.6.3',
|
||||
description: 'Fast passive subdomain enumeration tool.',
|
||||
categoryNames: ['subdomain', 'recon'],
|
||||
directory: '/opt/tools/subfinder',
|
||||
installCommand: 'go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest',
|
||||
updateCommand: 'go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest',
|
||||
versionCommand: 'subfinder -version',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
updatedAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'httpx',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/projectdiscovery/httpx',
|
||||
version: 'v1.6.0',
|
||||
description: 'Fast and multi-purpose HTTP toolkit.',
|
||||
categoryNames: ['http', 'recon'],
|
||||
directory: '/opt/tools/httpx',
|
||||
installCommand: 'go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest',
|
||||
updateCommand: 'go install -v github.com/projectdiscovery/httpx/cmd/httpx@latest',
|
||||
versionCommand: 'httpx -version',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
updatedAt: '2024-12-28T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'nuclei',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/projectdiscovery/nuclei',
|
||||
version: 'v3.1.0',
|
||||
description: 'Fast and customizable vulnerability scanner.',
|
||||
categoryNames: ['vulnerability'],
|
||||
directory: '/opt/tools/nuclei',
|
||||
installCommand: 'go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest',
|
||||
updateCommand: 'go install -v github.com/projectdiscovery/nuclei/v3/cmd/nuclei@latest',
|
||||
versionCommand: 'nuclei -version',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
updatedAt: '2024-12-28T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'naabu',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/projectdiscovery/naabu',
|
||||
version: 'v2.2.1',
|
||||
description: 'Fast port scanner written in go.',
|
||||
categoryNames: ['port', 'network'],
|
||||
directory: '/opt/tools/naabu',
|
||||
installCommand: 'go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest',
|
||||
updateCommand: 'go install -v github.com/projectdiscovery/naabu/v2/cmd/naabu@latest',
|
||||
versionCommand: 'naabu -version',
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
updatedAt: '2024-12-28T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'katana',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/projectdiscovery/katana',
|
||||
version: 'v1.0.4',
|
||||
description: 'Next-generation crawling and spidering framework.',
|
||||
categoryNames: ['crawler', 'recon'],
|
||||
directory: '/opt/tools/katana',
|
||||
installCommand: 'go install github.com/projectdiscovery/katana/cmd/katana@latest',
|
||||
updateCommand: 'go install github.com/projectdiscovery/katana/cmd/katana@latest',
|
||||
versionCommand: 'katana -version',
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
updatedAt: '2024-12-28T10:04:00Z',
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
name: 'ffuf',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/ffuf/ffuf',
|
||||
version: 'v2.1.0',
|
||||
description: 'Fast web fuzzer written in Go.',
|
||||
categoryNames: ['directory', 'fuzzer'],
|
||||
directory: '/opt/tools/ffuf',
|
||||
installCommand: 'go install github.com/ffuf/ffuf/v2@latest',
|
||||
updateCommand: 'go install github.com/ffuf/ffuf/v2@latest',
|
||||
versionCommand: 'ffuf -V',
|
||||
createdAt: '2024-12-20T10:05:00Z',
|
||||
updatedAt: '2024-12-28T10:05:00Z',
|
||||
},
|
||||
{
|
||||
id: 7,
|
||||
name: 'amass',
|
||||
type: 'opensource',
|
||||
repoUrl: 'https://github.com/owasp-amass/amass',
|
||||
version: 'v4.2.0',
|
||||
description: 'In-depth attack surface mapping and asset discovery.',
|
||||
categoryNames: ['subdomain', 'recon'],
|
||||
directory: '/opt/tools/amass',
|
||||
installCommand: 'go install -v github.com/owasp-amass/amass/v4/...@master',
|
||||
updateCommand: 'go install -v github.com/owasp-amass/amass/v4/...@master',
|
||||
versionCommand: 'amass -version',
|
||||
createdAt: '2024-12-20T10:06:00Z',
|
||||
updatedAt: '2024-12-28T10:06:00Z',
|
||||
},
|
||||
{
|
||||
id: 8,
|
||||
name: 'xingfinger',
|
||||
type: 'custom',
|
||||
repoUrl: '',
|
||||
version: '1.0.0',
|
||||
description: '自定义指纹识别工具',
|
||||
categoryNames: ['recon'],
|
||||
directory: '/opt/tools/xingfinger',
|
||||
installCommand: '',
|
||||
updateCommand: '',
|
||||
versionCommand: '',
|
||||
createdAt: '2024-12-20T10:07:00Z',
|
||||
updatedAt: '2024-12-28T10:07:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
export function getMockTools(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
}): GetToolsResponse {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
|
||||
const total = mockTools.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const tools = mockTools.slice(start, start + pageSize)
|
||||
|
||||
return {
|
||||
tools,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockToolById(id: number): Tool | undefined {
|
||||
return mockTools.find(t => t.id === id)
|
||||
}
|
||||
frontend/mock/data/wordlists.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
import type { Wordlist, GetWordlistsResponse } from '@/types/wordlist.types'
|
||||
|
||||
export const mockWordlists: Wordlist[] = [
|
||||
{
|
||||
id: 1,
|
||||
name: 'common-dirs.txt',
|
||||
description: '常用目录字典',
|
||||
fileSize: 45678,
|
||||
lineCount: 4567,
|
||||
fileHash: 'a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6',
|
||||
createdAt: '2024-12-20T10:00:00Z',
|
||||
updatedAt: '2024-12-28T10:00:00Z',
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: 'subdomains-top1million.txt',
|
||||
description: 'Top 100万子域名字典',
|
||||
fileSize: 12345678,
|
||||
lineCount: 1000000,
|
||||
fileHash: 'b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7',
|
||||
createdAt: '2024-12-20T10:01:00Z',
|
||||
updatedAt: '2024-12-28T10:01:00Z',
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: 'api-endpoints.txt',
|
||||
description: 'API 端点字典',
|
||||
fileSize: 23456,
|
||||
lineCount: 2345,
|
||||
fileHash: 'c3d4e5f6g7h8i9j0k1l2m3n4o5p6q7r8',
|
||||
createdAt: '2024-12-20T10:02:00Z',
|
||||
updatedAt: '2024-12-28T10:02:00Z',
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: 'params.txt',
|
||||
description: '常用参数名字典',
|
||||
fileSize: 8901,
|
||||
lineCount: 890,
|
||||
fileHash: 'd4e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9',
|
||||
createdAt: '2024-12-20T10:03:00Z',
|
||||
updatedAt: '2024-12-28T10:03:00Z',
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: 'sensitive-files.txt',
|
||||
description: '敏感文件字典',
|
||||
fileSize: 5678,
|
||||
lineCount: 567,
|
||||
fileHash: 'e5f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0',
|
||||
createdAt: '2024-12-20T10:04:00Z',
|
||||
updatedAt: '2024-12-28T10:04:00Z',
|
||||
},
|
||||
{
|
||||
id: 6,
|
||||
name: 'raft-large-directories.txt',
|
||||
description: 'RAFT 大型目录字典',
|
||||
fileSize: 987654,
|
||||
lineCount: 98765,
|
||||
fileHash: 'f6g7h8i9j0k1l2m3n4o5p6q7r8s9t0u1',
|
||||
createdAt: '2024-12-20T10:05:00Z',
|
||||
updatedAt: '2024-12-28T10:05:00Z',
|
||||
},
|
||||
]
|
||||
|
||||
export const mockWordlistContent = `admin
|
||||
api
|
||||
backup
|
||||
config
|
||||
dashboard
|
||||
debug
|
||||
dev
|
||||
docs
|
||||
download
|
||||
files
|
||||
images
|
||||
js
|
||||
login
|
||||
logs
|
||||
manager
|
||||
private
|
||||
public
|
||||
static
|
||||
test
|
||||
upload
|
||||
users
|
||||
v1
|
||||
v2
|
||||
wp-admin
|
||||
wp-content`
|
||||
|
||||
export function getMockWordlists(params?: {
|
||||
page?: number
|
||||
pageSize?: number
|
||||
}): GetWordlistsResponse {
|
||||
const page = params?.page || 1
|
||||
const pageSize = params?.pageSize || 10
|
||||
|
||||
const total = mockWordlists.length
|
||||
const totalPages = Math.ceil(total / pageSize)
|
||||
const start = (page - 1) * pageSize
|
||||
const results = mockWordlists.slice(start, start + pageSize)
|
||||
|
||||
return {
|
||||
results,
|
||||
total,
|
||||
page,
|
||||
pageSize,
|
||||
totalPages,
|
||||
}
|
||||
}
|
||||
|
||||
export function getMockWordlistById(id: number): Wordlist | undefined {
|
||||
return mockWordlists.find(w => w.id === id)
|
||||
}
|
||||
|
||||
export function getMockWordlistContent(): string {
|
||||
return mockWordlistContent
|
||||
}
|
||||
@@ -105,3 +105,80 @@ export {
  getMockScheduledScans,
  getMockScheduledScanById,
} from './data/scheduled-scans'

// Directories
export {
  mockDirectories,
  getMockDirectories,
  getMockDirectoryById,
} from './data/directories'

// Fingerprints
export {
  mockEholeFingerprints,
  mockGobyFingerprints,
  mockWappalyzerFingerprints,
  mockFingersFingerprints,
  mockFingerPrintHubFingerprints,
  mockARLFingerprints,
  mockFingerprintStats,
  getMockEholeFingerprints,
  getMockGobyFingerprints,
  getMockWappalyzerFingerprints,
  getMockFingersFingerprints,
  getMockFingerPrintHubFingerprints,
  getMockARLFingerprints,
  getMockFingerprintStats,
} from './data/fingerprints'

// IP Addresses
export {
  mockIPAddresses,
  getMockIPAddresses,
  getMockIPAddressByIP,
} from './data/ip-addresses'

// Search
export {
  getMockSearchResults,
} from './data/search'

// Tools
export {
  mockTools,
  getMockTools,
  getMockToolById,
} from './data/tools'

// Wordlists
export {
  mockWordlists,
  mockWordlistContent,
  getMockWordlists,
  getMockWordlistById,
  getMockWordlistContent,
} from './data/wordlists'

// Nuclei Templates
export {
  mockNucleiTemplateTree,
  mockNucleiTemplateContent,
  getMockNucleiTemplateTree,
  getMockNucleiTemplateContent,
} from './data/nuclei-templates'

// System Logs
export {
  mockLogFiles,
  mockSystemLogContent,
  mockErrorLogContent,
  getMockLogFiles,
  getMockSystemLogs,
} from './data/system-logs'

// Notification Settings
export {
  mockNotificationSettings,
  getMockNotificationSettings,
  updateMockNotificationSettings,
} from './data/notification-settings'
frontend/services/api-key-settings.service.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
import { api } from '@/lib/api-client'
import type { ApiKeySettings } from '@/types/api-key-settings.types'

export class ApiKeySettingsService {
  static async getSettings(): Promise<ApiKeySettings> {
    const res = await api.get<ApiKeySettings>('/settings/api-keys/')
    return res.data
  }

  static async updateSettings(data: Partial<ApiKeySettings>): Promise<ApiKeySettings> {
    const res = await api.put<ApiKeySettings>('/settings/api-keys/', data)
    return res.data
  }
}
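A minimal usage sketch for the service above; the payload values are placeholders, and `updateSettings` accepts a `Partial<ApiKeySettings>` per the diff.

```ts
// Illustrative only - keys and values are placeholders.
const settings = await ApiKeySettingsService.getSettings()
await ApiKeySettingsService.updateSettings({
  shodan: { enabled: true, apiKey: 'YOUR_SHODAN_KEY' },
})
```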
frontend/services/global-blacklist.service.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { api } from '@/lib/api-client'

export interface GlobalBlacklistResponse {
  patterns: string[]
}

export interface UpdateGlobalBlacklistRequest {
  patterns: string[]
}

/**
 * Get global blacklist rules
 */
export async function getGlobalBlacklist(): Promise<GlobalBlacklistResponse> {
  const res = await api.get<GlobalBlacklistResponse>('/blacklist/rules/')
  return res.data
}

/**
 * Update global blacklist rules (full replace)
 */
export async function updateGlobalBlacklist(data: UpdateGlobalBlacklistRequest): Promise<GlobalBlacklistResponse> {
  const res = await api.put<GlobalBlacklistResponse>('/blacklist/rules/', data)
  return res.data
}
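Since the update is a full replace rather than an append, callers are expected to send the complete rule list each time. A hedged sketch (the pattern string is invented for illustration):

```ts
// Illustrative only - the added pattern below is a placeholder.
const { patterns } = await getGlobalBlacklist()
await updateGlobalBlacklist({ patterns: [...patterns, '*.cdn.example.com'] })
```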
@@ -10,12 +10,26 @@ import { USE_MOCK, mockDelay, getMockScheduledScans, getMockScheduledScanById }
/**
 * Get scheduled scan list
 */
export async function getScheduledScans(params?: { page?: number; pageSize?: number; search?: string }): Promise<GetScheduledScansResponse> {
export async function getScheduledScans(params?: {
  page?: number
  pageSize?: number
  search?: string
  targetId?: number
  organizationId?: number
}): Promise<GetScheduledScansResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return getMockScheduledScans(params)
  }
  const res = await api.get<GetScheduledScansResponse>('/scheduled-scans/', { params })
  // Convert camelCase to snake_case for query params (djangorestframework-camel-case doesn't convert query params)
  const apiParams: Record<string, unknown> = {}
  if (params?.page) apiParams.page = params.page
  if (params?.pageSize) apiParams.pageSize = params.pageSize
  if (params?.search) apiParams.search = params.search
  if (params?.targetId) apiParams.target_id = params.targetId
  if (params?.organizationId) apiParams.organization_id = params.organizationId

  const res = await api.get<GetScheduledScansResponse>('/scheduled-scans/', { params: apiParams })
  return res.data
}
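To make the query-parameter conversion above concrete, a hedged example of what a call would send (values invented): `targetId`/`organizationId` become `target_id`/`organization_id`, while `page`, `pageSize`, and `search` pass through unchanged.

```ts
// Illustrative only.
await getScheduledScans({ page: 1, pageSize: 20, targetId: 42 })
// -> GET /scheduled-scans/?page=1&pageSize=20&target_id=42
```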
@@ -159,3 +159,22 @@ export async function getTargetEndpoints(
  return response.data
}

/**
 * Get target's blacklist rules
 */
export async function getTargetBlacklist(id: number): Promise<{ patterns: string[] }> {
  const response = await api.get<{ patterns: string[] }>(`/targets/${id}/blacklist/`)
  return response.data
}

/**
 * Update target's blacklist rules (full replace)
 */
export async function updateTargetBlacklist(
  id: number,
  patterns: string[]
): Promise<{ count: number }> {
  const response = await api.put<{ count: number }>(`/targets/${id}/blacklist/`, { patterns })
  return response.data
}
frontend/types/api-key-settings.types.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
/**
 * API key settings type definitions
 * Used for subfinder third-party data source configuration
 */

// Single-field provider config (hunter, shodan, zoomeye, securitytrails, threatbook, quake)
export interface SingleFieldProviderConfig {
  enabled: boolean
  apiKey: string
}

// FOFA provider config (email + apiKey)
export interface FofaProviderConfig {
  enabled: boolean
  email: string
  apiKey: string
}

// Censys provider config (apiId + apiSecret)
export interface CensysProviderConfig {
  enabled: boolean
  apiId: string
  apiSecret: string
}

// Complete API key settings
export interface ApiKeySettings {
  fofa: FofaProviderConfig
  hunter: SingleFieldProviderConfig
  shodan: SingleFieldProviderConfig
  censys: CensysProviderConfig
  zoomeye: SingleFieldProviderConfig
  securitytrails: SingleFieldProviderConfig
  threatbook: SingleFieldProviderConfig
  quake: SingleFieldProviderConfig
}

// Provider key type
export type ProviderKey = keyof ApiKeySettings

// Union type of provider configs
export type ProviderConfig = FofaProviderConfig | CensysProviderConfig | SingleFieldProviderConfig
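For reference, a minimal object that fits the shapes above, typed as the partial update payload the settings service accepts; all keys and values are placeholders.

```ts
// Illustrative only - placeholder values.
const patch: Partial<ApiKeySettings> = {
  fofa: { enabled: true, email: 'user@example.com', apiKey: 'FOFA_KEY' },
  censys: { enabled: false, apiId: 'CENSYS_ID', apiSecret: 'CENSYS_SECRET' },
  shodan: { enabled: true, apiKey: 'SHODAN_KEY' },
}
```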
@@ -66,6 +66,7 @@ export interface GetScansParams {
  pageSize?: number
  status?: ScanStatus
  search?: string
  target?: number // Filter by target ID
}

export interface GetScansResponse {