Mirror of https://github.com/yyhuni/xingrin.git (synced 2026-01-31 19:53:11 +08:00)

Compare commits: v1.3.0-dev...v1.3.6-dev (14 commits)
| SHA1 |
|---|
| 4b4f9862bf |
| 1c42e4978f |
| 57bab63997 |
| b1f0f18ac0 |
| ccee5471b8 |
| 0ccd362535 |
| 7f2af7f7e2 |
| 4bd0f9e8c1 |
| 68cc996e3b |
| f1e79d638e |
| d484133e4c |
| fc977ae029 |
| f328474404 |
| 68e726a066 |
.github/workflows/docker-build.yml (vendored): 4 lines changed
@@ -44,6 +44,10 @@ jobs:
          dockerfile: docker/agent/Dockerfile
          context: .
          platforms: linux/amd64,linux/arm64
        - image: xingrin-postgres
          dockerfile: docker/postgres/Dockerfile
          context: docker/postgres
          platforms: linux/amd64,linux/arm64

    steps:
      - name: Checkout
README.md: 32 lines changed
@@ -13,14 +13,14 @@

<p align="center">
  <a href="#-功能特性">功能特性</a> •
  <a href="#-全局资产搜索">资产搜索</a> •
  <a href="#-快速开始">快速开始</a> •
  <a href="#-文档">文档</a> •
  <a href="#-技术栈">技术栈</a> •
  <a href="#-反馈与贡献">反馈与贡献</a>
</p>

<p align="center">
  <sub>🔍 关键词: ASM | 攻击面管理 | 漏洞扫描 | 资产发现 | Bug Bounty | 渗透测试 | Nuclei | 子域名枚举 | EASM</sub>
  <sub>🔍 关键词: ASM | 攻击面管理 | 漏洞扫描 | 资产发现 | 资产搜索 | Bug Bounty | 渗透测试 | Nuclei | 子域名枚举 | EASM</sub>
</p>

---
@@ -162,9 +162,34 @@ flowchart TB
    W3 -.心跳上报.-> REDIS
```

### 🔎 全局资产搜索
- **多类型搜索** - 支持 Website 和 Endpoint 两种资产类型
- **表达式语法** - 支持 `=`(模糊)、`==`(精确)、`!=`(不等于)操作符
- **逻辑组合** - 支持 `&&` (AND) 和 `||` (OR) 逻辑组合
- **多字段查询** - 支持 host、url、title、tech、status、body、header 字段
- **CSV 导出** - 流式导出全部搜索结果,无数量限制

#### 搜索语法示例

```bash
# 基础搜索
host="api"        # host 包含 "api"
status=="200"     # 状态码精确等于 200
tech="nginx"      # 技术栈包含 nginx

# 组合搜索
host="api" && status=="200"   # host 包含 api 且状态码为 200
tech="vue" || tech="react"    # 技术栈包含 vue 或 react

# 复杂查询
host="admin" && tech="php" && status=="200"
url="/api/v1" && status!="404"
```
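For reference, the search and CSV-export endpoints behind this feature (added later in this change as `/api/assets/search/` and `/api/assets/search/export/`) can be exercised with a short client. This is a minimal sketch: the endpoint paths come from the new search views, while the host/port, TLS handling, and any authentication are deployment-specific assumptions.

```python
# Minimal sketch of calling the new search API; endpoint paths are from this
# change, host/port/auth are assumptions about a default deployment.
import requests

BASE = "https://<ip>:8083"  # Web UI address from the quick-start section

resp = requests.get(
    f"{BASE}/api/assets/search/",
    params={"q": 'host="api" && status=="200"', "asset_type": "website",
            "page": 1, "pageSize": 10},
    verify=False,  # assumption: self-signed certificate on a default install
)
print(resp.json())

# Stream the full result set to a CSV file (no row limit)
with requests.get(
    f"{BASE}/api/assets/search/export/",
    params={"q": 'tech="nginx"', "asset_type": "endpoint"},
    stream=True, verify=False,
) as r, open("search_endpoint.csv", "wb") as f:
    for chunk in r.iter_content(chunk_size=8192):
        f.write(chunk)
```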
### 📊 可视化界面
- **数据统计** - 资产/漏洞统计仪表盘
- **实时通知** - WebSocket 消息推送
- **通知推送** - 实时企业微信、Telegram、Discord 消息推送服务

---
@@ -172,7 +197,7 @@ flowchart TB

### 环境要求

- **操作系统**: Ubuntu 20.04+ / Debian 11+ (推荐)
- **操作系统**: Ubuntu 20.04+ / Debian 11+
- **硬件**: 2核 4G 内存起步,20GB+ 磁盘空间

### 一键安装
@@ -197,6 +222,7 @@ sudo ./install.sh --mirror

### 访问服务

- **Web 界面**: `https://ip:8083`
- **默认账号**: admin / admin(首次登录后请修改密码)

### 常用命令
@@ -13,6 +13,7 @@ class EndpointSnapshotDTO:
|
||||
快照只属于 scan。
|
||||
"""
|
||||
scan_id: int
|
||||
target_id: int # 必填,用于同步到资产表
|
||||
url: str
|
||||
host: str = '' # 主机名(域名或IP地址)
|
||||
title: str = ''
|
||||
@@ -25,7 +26,6 @@ class EndpointSnapshotDTO:
|
||||
response_body: str = ''
|
||||
vhost: Optional[bool] = None
|
||||
matched_gf_patterns: List[str] = None
|
||||
target_id: Optional[int] = None # 冗余字段,用于同步到资产表
|
||||
response_headers: str = ''
|
||||
|
||||
def __post_init__(self):
|
||||
@@ -43,9 +43,6 @@ class EndpointSnapshotDTO:
|
||||
"""
|
||||
from apps.asset.dtos.asset import EndpointDTO
|
||||
|
||||
if self.target_id is None:
|
||||
raise ValueError("target_id 不能为 None,无法同步到资产表")
|
||||
|
||||
return EndpointDTO(
|
||||
target_id=self.target_id,
|
||||
url=self.url,
|
||||
|
||||
@@ -13,14 +13,14 @@ class WebsiteSnapshotDTO:
|
||||
快照只属于 scan,target 信息通过 scan.target 获取。
|
||||
"""
|
||||
scan_id: int
|
||||
target_id: int # 仅用于传递数据,不保存到数据库
|
||||
target_id: int # 必填,用于同步到资产表
|
||||
url: str
|
||||
host: str
|
||||
title: str = ''
|
||||
status: Optional[int] = None
|
||||
status_code: Optional[int] = None # 统一命名:status -> status_code
|
||||
content_length: Optional[int] = None
|
||||
location: str = ''
|
||||
web_server: str = ''
|
||||
webserver: str = '' # 统一命名:web_server -> webserver
|
||||
content_type: str = ''
|
||||
tech: List[str] = None
|
||||
response_body: str = ''
|
||||
@@ -45,10 +45,10 @@ class WebsiteSnapshotDTO:
|
||||
url=self.url,
|
||||
host=self.host,
|
||||
title=self.title,
|
||||
status_code=self.status,
|
||||
status_code=self.status_code,
|
||||
content_length=self.content_length,
|
||||
location=self.location,
|
||||
webserver=self.web_server,
|
||||
webserver=self.webserver,
|
||||
content_type=self.content_type,
|
||||
tech=self.tech if self.tech else [],
|
||||
response_body=self.response_body,
|
||||
|
||||
@@ -116,14 +116,14 @@ class Migration(migrations.Migration):
|
||||
name='Endpoint',
|
||||
fields=[
|
||||
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||
('url', models.CharField(help_text='最终访问的完整URL', max_length=2000)),
|
||||
('url', models.TextField(help_text='最终访问的完整URL')),
|
||||
('host', models.CharField(blank=True, default='', help_text='主机名(域名或IP地址)', max_length=253)),
|
||||
('location', models.CharField(blank=True, default='', help_text='重定向地址(HTTP 3xx 响应头 Location)', max_length=1000)),
|
||||
('location', models.TextField(blank=True, default='', help_text='重定向地址(HTTP 3xx 响应头 Location)')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True, help_text='创建时间')),
|
||||
('title', models.CharField(blank=True, default='', help_text='网页标题(HTML <title> 标签内容)', max_length=1000)),
|
||||
('webserver', models.CharField(blank=True, default='', help_text='服务器类型(HTTP 响应头 Server 值)', max_length=200)),
|
||||
('title', models.TextField(blank=True, default='', help_text='网页标题(HTML <title> 标签内容)')),
|
||||
('webserver', models.TextField(blank=True, default='', help_text='服务器类型(HTTP 响应头 Server 值)')),
|
||||
('response_body', models.TextField(blank=True, default='', help_text='HTTP响应体')),
|
||||
('content_type', models.CharField(blank=True, default='', help_text='响应类型(HTTP Content-Type 响应头)', max_length=200)),
|
||||
('content_type', models.TextField(blank=True, default='', help_text='响应类型(HTTP Content-Type 响应头)')),
|
||||
('tech', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), blank=True, default=list, help_text='技术栈(服务器/框架/语言等)', size=None)),
|
||||
('status_code', models.IntegerField(blank=True, help_text='HTTP状态码', null=True)),
|
||||
('content_length', models.IntegerField(blank=True, help_text='响应体大小(单位字节)', null=True)),
|
||||
@@ -145,14 +145,14 @@ class Migration(migrations.Migration):
|
||||
name='EndpointSnapshot',
|
||||
fields=[
|
||||
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||
('url', models.CharField(help_text='端点URL', max_length=2000)),
|
||||
('url', models.TextField(help_text='端点URL')),
|
||||
('host', models.CharField(blank=True, default='', help_text='主机名(域名或IP地址)', max_length=253)),
|
||||
('title', models.CharField(blank=True, default='', help_text='页面标题', max_length=1000)),
|
||||
('title', models.TextField(blank=True, default='', help_text='页面标题')),
|
||||
('status_code', models.IntegerField(blank=True, help_text='HTTP状态码', null=True)),
|
||||
('content_length', models.IntegerField(blank=True, help_text='内容长度', null=True)),
|
||||
('location', models.CharField(blank=True, default='', help_text='重定向位置', max_length=1000)),
|
||||
('webserver', models.CharField(blank=True, default='', help_text='Web服务器', max_length=200)),
|
||||
('content_type', models.CharField(blank=True, default='', help_text='内容类型', max_length=200)),
|
||||
('location', models.TextField(blank=True, default='', help_text='重定向位置')),
|
||||
('webserver', models.TextField(blank=True, default='', help_text='Web服务器')),
|
||||
('content_type', models.TextField(blank=True, default='', help_text='内容类型')),
|
||||
('tech', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), blank=True, default=list, help_text='技术栈', size=None)),
|
||||
('response_body', models.TextField(blank=True, default='', help_text='HTTP响应体')),
|
||||
('vhost', models.BooleanField(blank=True, help_text='虚拟主机标志', null=True)),
|
||||
@@ -290,14 +290,14 @@ class Migration(migrations.Migration):
|
||||
name='WebSite',
|
||||
fields=[
|
||||
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||
('url', models.CharField(help_text='最终访问的完整URL', max_length=2000)),
|
||||
('url', models.TextField(help_text='最终访问的完整URL')),
|
||||
('host', models.CharField(blank=True, default='', help_text='主机名(域名或IP地址)', max_length=253)),
|
||||
('location', models.CharField(blank=True, default='', help_text='重定向地址(HTTP 3xx 响应头 Location)', max_length=1000)),
|
||||
('location', models.TextField(blank=True, default='', help_text='重定向地址(HTTP 3xx 响应头 Location)')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True, help_text='创建时间')),
|
||||
('title', models.CharField(blank=True, default='', help_text='网页标题(HTML <title> 标签内容)', max_length=1000)),
|
||||
('webserver', models.CharField(blank=True, default='', help_text='服务器类型(HTTP 响应头 Server 值)', max_length=200)),
|
||||
('title', models.TextField(blank=True, default='', help_text='网页标题(HTML <title> 标签内容)')),
|
||||
('webserver', models.TextField(blank=True, default='', help_text='服务器类型(HTTP 响应头 Server 值)')),
|
||||
('response_body', models.TextField(blank=True, default='', help_text='HTTP响应体')),
|
||||
('content_type', models.CharField(blank=True, default='', help_text='响应类型(HTTP Content-Type 响应头)', max_length=200)),
|
||||
('content_type', models.TextField(blank=True, default='', help_text='响应类型(HTTP Content-Type 响应头)')),
|
||||
('tech', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), blank=True, default=list, help_text='技术栈(服务器/框架/语言等)', size=None)),
|
||||
('status_code', models.IntegerField(blank=True, help_text='HTTP状态码', null=True)),
|
||||
('content_length', models.IntegerField(blank=True, help_text='响应体大小(单位字节)', null=True)),
|
||||
@@ -318,14 +318,14 @@ class Migration(migrations.Migration):
|
||||
name='WebsiteSnapshot',
|
||||
fields=[
|
||||
('id', models.AutoField(primary_key=True, serialize=False)),
|
||||
('url', models.CharField(help_text='站点URL', max_length=2000)),
|
||||
('url', models.TextField(help_text='站点URL')),
|
||||
('host', models.CharField(blank=True, default='', help_text='主机名(域名或IP地址)', max_length=253)),
|
||||
('title', models.CharField(blank=True, default='', help_text='页面标题', max_length=500)),
|
||||
('status', models.IntegerField(blank=True, help_text='HTTP状态码', null=True)),
|
||||
('title', models.TextField(blank=True, default='', help_text='页面标题')),
|
||||
('status_code', models.IntegerField(blank=True, help_text='HTTP状态码', null=True)),
|
||||
('content_length', models.BigIntegerField(blank=True, help_text='内容长度', null=True)),
|
||||
('location', models.CharField(blank=True, default='', help_text='重定向位置', max_length=1000)),
|
||||
('web_server', models.CharField(blank=True, default='', help_text='Web服务器', max_length=200)),
|
||||
('content_type', models.CharField(blank=True, default='', help_text='内容类型', max_length=200)),
|
||||
('location', models.TextField(blank=True, default='', help_text='重定向位置')),
|
||||
('webserver', models.TextField(blank=True, default='', help_text='Web服务器')),
|
||||
('content_type', models.TextField(blank=True, default='', help_text='内容类型')),
|
||||
('tech', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), blank=True, default=list, help_text='技术栈', size=None)),
|
||||
('response_body', models.TextField(blank=True, default='', help_text='HTTP响应体')),
|
||||
('vhost', models.BooleanField(blank=True, help_text='虚拟主机标志', null=True)),
|
||||
|
||||
@@ -2,6 +2,10 @@
创建资产搜索 IMMV(增量维护物化视图)

使用 pg_ivm 扩展创建 IMMV,数据变更时自动增量更新,无需手动刷新。

包含:
1. asset_search_view - Website 搜索视图
2. endpoint_search_view - Endpoint 搜索视图
"""

from django.db import migrations
@@ -20,7 +24,9 @@ class Migration(migrations.Migration):
|
||||
reverse_sql="-- pg_ivm extension kept for other uses"
|
||||
),
|
||||
|
||||
# 2. 使用 pg_ivm 创建 IMMV
|
||||
# ==================== Website IMMV ====================
|
||||
|
||||
# 2. 创建 asset_search_view IMMV
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
SELECT pgivm.create_immv('asset_search_view', $$
|
||||
@@ -33,6 +39,11 @@ class Migration(migrations.Migration):
|
||||
w.status_code,
|
||||
w.response_headers,
|
||||
w.response_body,
|
||||
w.content_type,
|
||||
w.content_length,
|
||||
w.webserver,
|
||||
w.location,
|
||||
w.vhost,
|
||||
w.created_at,
|
||||
w.target_id
|
||||
FROM website w
|
||||
@@ -41,20 +52,13 @@ class Migration(migrations.Migration):
|
||||
reverse_sql="SELECT pgivm.drop_immv('asset_search_view');"
|
||||
),
|
||||
|
||||
# 3. 创建唯一索引(用于标识)
|
||||
# 3. 创建 asset_search_view 索引
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- 唯一索引
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS asset_search_view_id_idx
|
||||
ON asset_search_view (id);
|
||||
""",
|
||||
reverse_sql="""
|
||||
DROP INDEX IF EXISTS asset_search_view_id_idx;
|
||||
"""
|
||||
),
|
||||
|
||||
# 4. 创建搜索优化索引
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
|
||||
-- host 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS asset_search_view_host_trgm_idx
|
||||
ON asset_search_view USING gin (host gin_trgm_ops);
|
||||
@@ -88,6 +92,7 @@ class Migration(migrations.Migration):
|
||||
ON asset_search_view (created_at DESC);
|
||||
""",
|
||||
reverse_sql="""
|
||||
DROP INDEX IF EXISTS asset_search_view_id_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_host_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_title_trgm_idx;
|
||||
DROP INDEX IF EXISTS asset_search_view_url_trgm_idx;
|
||||
@@ -98,4 +103,85 @@ class Migration(migrations.Migration):
|
||||
DROP INDEX IF EXISTS asset_search_view_created_idx;
|
||||
"""
|
||||
),
|
||||
|
||||
# ==================== Endpoint IMMV ====================
|
||||
|
||||
# 4. 创建 endpoint_search_view IMMV
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
SELECT pgivm.create_immv('endpoint_search_view', $$
|
||||
SELECT
|
||||
e.id,
|
||||
e.url,
|
||||
e.host,
|
||||
e.title,
|
||||
e.tech,
|
||||
e.status_code,
|
||||
e.response_headers,
|
||||
e.response_body,
|
||||
e.content_type,
|
||||
e.content_length,
|
||||
e.webserver,
|
||||
e.location,
|
||||
e.vhost,
|
||||
e.matched_gf_patterns,
|
||||
e.created_at,
|
||||
e.target_id
|
||||
FROM endpoint e
|
||||
$$);
|
||||
""",
|
||||
reverse_sql="SELECT pgivm.drop_immv('endpoint_search_view');"
|
||||
),
|
||||
|
||||
# 5. 创建 endpoint_search_view 索引
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- 唯一索引
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS endpoint_search_view_id_idx
|
||||
ON endpoint_search_view (id);
|
||||
|
||||
-- host 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_host_trgm_idx
|
||||
ON endpoint_search_view USING gin (host gin_trgm_ops);
|
||||
|
||||
-- title 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_title_trgm_idx
|
||||
ON endpoint_search_view USING gin (title gin_trgm_ops);
|
||||
|
||||
-- url 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_url_trgm_idx
|
||||
ON endpoint_search_view USING gin (url gin_trgm_ops);
|
||||
|
||||
-- response_headers 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_headers_trgm_idx
|
||||
ON endpoint_search_view USING gin (response_headers gin_trgm_ops);
|
||||
|
||||
-- response_body 模糊搜索索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_body_trgm_idx
|
||||
ON endpoint_search_view USING gin (response_body gin_trgm_ops);
|
||||
|
||||
-- tech 数组索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_tech_idx
|
||||
ON endpoint_search_view USING gin (tech);
|
||||
|
||||
-- status_code 索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_status_idx
|
||||
ON endpoint_search_view (status_code);
|
||||
|
||||
-- created_at 排序索引
|
||||
CREATE INDEX IF NOT EXISTS endpoint_search_view_created_idx
|
||||
ON endpoint_search_view (created_at DESC);
|
||||
""",
|
||||
reverse_sql="""
|
||||
DROP INDEX IF EXISTS endpoint_search_view_id_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_host_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_title_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_url_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_headers_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_body_trgm_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_tech_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_status_idx;
|
||||
DROP INDEX IF EXISTS endpoint_search_view_created_idx;
|
||||
"""
|
||||
),
|
||||
]
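As a worked illustration of what this migration buys: a fuzzy search such as `host="api"` becomes an `ILIKE '%api%'` predicate, which the `gin_trgm_ops` indexes created above can serve, and pg_ivm keeps both views current as the underlying `website`/`endpoint` tables change, so no manual refresh is needed. A minimal read through Django's raw connection might look like this (sketch only; the real query building lives in the search service):

```python
# Sketch: query the incrementally-maintained view directly via Django.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute(
        """
        SELECT url, host, title, status_code
        FROM asset_search_view
        WHERE host ILIKE %s         -- can be served by the host trigram GIN index
          AND status_code = %s
        ORDER BY created_at DESC    -- matches the created_at DESC index
        LIMIT 10
        """,
        ["%api%", 200],
    )
    for url, host, title, status_code in cursor.fetchall():
        print(status_code, url, title)
```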
|
||||
|
||||
@@ -65,28 +65,25 @@ class Endpoint(models.Model):
|
||||
help_text='所属的扫描目标(主关联字段,表示所属关系,不能为空)'
|
||||
)
|
||||
|
||||
url = models.CharField(max_length=2000, help_text='最终访问的完整URL')
|
||||
url = models.TextField(help_text='最终访问的完整URL')
|
||||
host = models.CharField(
|
||||
max_length=253,
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='主机名(域名或IP地址)'
|
||||
)
|
||||
location = models.CharField(
|
||||
max_length=1000,
|
||||
location = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='重定向地址(HTTP 3xx 响应头 Location)'
|
||||
)
|
||||
created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
|
||||
title = models.CharField(
|
||||
max_length=1000,
|
||||
title = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='网页标题(HTML <title> 标签内容)'
|
||||
)
|
||||
webserver = models.CharField(
|
||||
max_length=200,
|
||||
webserver = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='服务器类型(HTTP 响应头 Server 值)'
|
||||
@@ -96,8 +93,7 @@ class Endpoint(models.Model):
|
||||
default='',
|
||||
help_text='HTTP响应体'
|
||||
)
|
||||
content_type = models.CharField(
|
||||
max_length=200,
|
||||
content_type = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='响应类型(HTTP Content-Type 响应头)'
|
||||
@@ -188,28 +184,25 @@ class WebSite(models.Model):
|
||||
help_text='所属的扫描目标(主关联字段,表示所属关系,不能为空)'
|
||||
)
|
||||
|
||||
url = models.CharField(max_length=2000, help_text='最终访问的完整URL')
|
||||
url = models.TextField(help_text='最终访问的完整URL')
|
||||
host = models.CharField(
|
||||
max_length=253,
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='主机名(域名或IP地址)'
|
||||
)
|
||||
location = models.CharField(
|
||||
max_length=1000,
|
||||
location = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='重定向地址(HTTP 3xx 响应头 Location)'
|
||||
)
|
||||
created_at = models.DateTimeField(auto_now_add=True, help_text='创建时间')
|
||||
title = models.CharField(
|
||||
max_length=1000,
|
||||
title = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='网页标题(HTML <title> 标签内容)'
|
||||
)
|
||||
webserver = models.CharField(
|
||||
max_length=200,
|
||||
webserver = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='服务器类型(HTTP 响应头 Server 值)'
|
||||
@@ -219,8 +212,7 @@ class WebSite(models.Model):
|
||||
default='',
|
||||
help_text='HTTP响应体'
|
||||
)
|
||||
content_type = models.CharField(
|
||||
max_length=200,
|
||||
content_type = models.TextField(
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='响应类型(HTTP Content-Type 响应头)'
|
||||
|
||||
@@ -61,14 +61,14 @@ class WebsiteSnapshot(models.Model):
|
||||
)
|
||||
|
||||
# 扫描结果数据
|
||||
url = models.CharField(max_length=2000, help_text='站点URL')
|
||||
url = models.TextField(help_text='站点URL')
|
||||
host = models.CharField(max_length=253, blank=True, default='', help_text='主机名(域名或IP地址)')
|
||||
title = models.CharField(max_length=500, blank=True, default='', help_text='页面标题')
|
||||
status = models.IntegerField(null=True, blank=True, help_text='HTTP状态码')
|
||||
title = models.TextField(blank=True, default='', help_text='页面标题')
|
||||
status_code = models.IntegerField(null=True, blank=True, help_text='HTTP状态码')
|
||||
content_length = models.BigIntegerField(null=True, blank=True, help_text='内容长度')
|
||||
location = models.CharField(max_length=1000, blank=True, default='', help_text='重定向位置')
|
||||
web_server = models.CharField(max_length=200, blank=True, default='', help_text='Web服务器')
|
||||
content_type = models.CharField(max_length=200, blank=True, default='', help_text='内容类型')
|
||||
location = models.TextField(blank=True, default='', help_text='重定向位置')
|
||||
webserver = models.TextField(blank=True, default='', help_text='Web服务器')
|
||||
content_type = models.TextField(blank=True, default='', help_text='内容类型')
|
||||
tech = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
@@ -267,19 +267,19 @@ class EndpointSnapshot(models.Model):
|
||||
)
|
||||
|
||||
# 扫描结果数据
|
||||
url = models.CharField(max_length=2000, help_text='端点URL')
|
||||
url = models.TextField(help_text='端点URL')
|
||||
host = models.CharField(
|
||||
max_length=253,
|
||||
blank=True,
|
||||
default='',
|
||||
help_text='主机名(域名或IP地址)'
|
||||
)
|
||||
title = models.CharField(max_length=1000, blank=True, default='', help_text='页面标题')
|
||||
title = models.TextField(blank=True, default='', help_text='页面标题')
|
||||
status_code = models.IntegerField(null=True, blank=True, help_text='HTTP状态码')
|
||||
content_length = models.IntegerField(null=True, blank=True, help_text='内容长度')
|
||||
location = models.CharField(max_length=1000, blank=True, default='', help_text='重定向位置')
|
||||
webserver = models.CharField(max_length=200, blank=True, default='', help_text='Web服务器')
|
||||
content_type = models.CharField(max_length=200, blank=True, default='', help_text='内容类型')
|
||||
location = models.TextField(blank=True, default='', help_text='重定向位置')
|
||||
webserver = models.TextField(blank=True, default='', help_text='Web服务器')
|
||||
content_type = models.TextField(blank=True, default='', help_text='内容类型')
|
||||
tech = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
|
||||
@@ -46,10 +46,10 @@ class DjangoWebsiteSnapshotRepository:
|
||||
url=item.url,
|
||||
host=item.host,
|
||||
title=item.title,
|
||||
status=item.status,
|
||||
status_code=item.status_code,
|
||||
content_length=item.content_length,
|
||||
location=item.location,
|
||||
web_server=item.web_server,
|
||||
webserver=item.webserver,
|
||||
content_type=item.content_type,
|
||||
tech=item.tech if item.tech else [],
|
||||
response_body=item.response_body,
|
||||
@@ -99,27 +99,12 @@ class DjangoWebsiteSnapshotRepository:
|
||||
WebsiteSnapshot.objects
|
||||
.filter(scan_id=scan_id)
|
||||
.values(
|
||||
'url', 'host', 'location', 'title', 'status',
|
||||
'content_length', 'content_type', 'web_server', 'tech',
|
||||
'url', 'host', 'location', 'title', 'status_code',
|
||||
'content_length', 'content_type', 'webserver', 'tech',
|
||||
'response_body', 'response_headers', 'vhost', 'created_at'
|
||||
)
|
||||
.order_by('url')
|
||||
)
|
||||
|
||||
for row in qs.iterator(chunk_size=batch_size):
|
||||
# 重命名字段以匹配 CSV 表头
|
||||
yield {
|
||||
'url': row['url'],
|
||||
'host': row['host'],
|
||||
'location': row['location'],
|
||||
'title': row['title'],
|
||||
'status_code': row['status'],
|
||||
'content_length': row['content_length'],
|
||||
'content_type': row['content_type'],
|
||||
'webserver': row['web_server'],
|
||||
'tech': row['tech'],
|
||||
'response_body': row['response_body'],
|
||||
'response_headers': row['response_headers'],
|
||||
'vhost': row['vhost'],
|
||||
'created_at': row['created_at'],
|
||||
}
|
||||
yield row
|
||||
|
||||
@@ -217,8 +217,6 @@ class WebsiteSnapshotSerializer(serializers.ModelSerializer):
|
||||
"""网站快照序列化器(用于扫描历史)"""
|
||||
|
||||
subdomain_name = serializers.CharField(source='subdomain.name', read_only=True)
|
||||
webserver = serializers.CharField(source='web_server', read_only=True) # 映射字段名
|
||||
status_code = serializers.IntegerField(source='status', read_only=True) # 映射字段名
|
||||
responseHeaders = serializers.CharField(source='response_headers', read_only=True) # 原始HTTP响应头
|
||||
|
||||
class Meta:
|
||||
@@ -228,9 +226,9 @@ class WebsiteSnapshotSerializer(serializers.ModelSerializer):
|
||||
'url',
|
||||
'location',
|
||||
'title',
|
||||
'webserver', # 使用映射后的字段名
|
||||
'webserver',
|
||||
'content_type',
|
||||
'status_code', # 使用映射后的字段名
|
||||
'status_code',
|
||||
'content_length',
|
||||
'response_body',
|
||||
'tech',
|
||||
|
||||
@@ -27,7 +27,7 @@ class EndpointService:
|
||||
'url': 'url',
|
||||
'host': 'host',
|
||||
'title': 'title',
|
||||
'status': 'status_code',
|
||||
'status_code': 'status_code',
|
||||
'tech': 'tech',
|
||||
}
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ class WebSiteService:
|
||||
'url': 'url',
|
||||
'host': 'host',
|
||||
'title': 'title',
|
||||
'status': 'status_code',
|
||||
'status_code': 'status_code',
|
||||
'tech': 'tech',
|
||||
}
|
||||
|
||||
|
||||
@@ -6,11 +6,12 @@
|
||||
- 支持表达式语法解析
|
||||
- 支持 =(模糊)、==(精确)、!=(不等于)操作符
|
||||
- 支持 && (AND) 和 || (OR) 逻辑组合
|
||||
- 支持 Website 和 Endpoint 两种资产类型
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from typing import Optional, List, Dict, Any, Tuple
|
||||
from typing import Optional, List, Dict, Any, Tuple, Literal
|
||||
|
||||
from django.db import connection
|
||||
|
||||
@@ -30,6 +31,54 @@ FIELD_MAPPING = {
|
||||
# 数组类型字段
|
||||
ARRAY_FIELDS = {'tech'}
|
||||
|
||||
# 资产类型到视图名的映射
|
||||
VIEW_MAPPING = {
|
||||
'website': 'asset_search_view',
|
||||
'endpoint': 'endpoint_search_view',
|
||||
}
|
||||
|
||||
# 有效的资产类型
|
||||
VALID_ASSET_TYPES = {'website', 'endpoint'}
|
||||
|
||||
# Website 查询字段
|
||||
WEBSITE_SELECT_FIELDS = """
|
||||
id,
|
||||
url,
|
||||
host,
|
||||
title,
|
||||
tech,
|
||||
status_code,
|
||||
response_headers,
|
||||
response_body,
|
||||
content_type,
|
||||
content_length,
|
||||
webserver,
|
||||
location,
|
||||
vhost,
|
||||
created_at,
|
||||
target_id
|
||||
"""
|
||||
|
||||
# Endpoint 查询字段(包含 matched_gf_patterns)
|
||||
ENDPOINT_SELECT_FIELDS = """
|
||||
id,
|
||||
url,
|
||||
host,
|
||||
title,
|
||||
tech,
|
||||
status_code,
|
||||
response_headers,
|
||||
response_body,
|
||||
content_type,
|
||||
content_length,
|
||||
webserver,
|
||||
location,
|
||||
vhost,
|
||||
matched_gf_patterns,
|
||||
created_at,
|
||||
target_id
|
||||
"""
|
||||
|
||||
|
||||
class SearchQueryParser:
|
||||
"""
|
||||
@@ -226,6 +275,12 @@
        if is_array:
            # 数组字段:检查数组中是否有元素包含该值
            return f"EXISTS (SELECT 1 FROM unnest({field}) AS t WHERE t ILIKE %s)", [f"%{value}%"]
        elif field == 'status_code':
            # 状态码是整数,模糊匹配转为精确匹配
            try:
                return f"{field} = %s", [int(value)]
            except ValueError:
                return f"{field}::text ILIKE %s", [f"%{value}%"]
        else:
            return f"{field} ILIKE %s", [f"%{value}%"]
|
||||
|
||||
@@ -259,37 +314,46 @@ class SearchQueryParser:
|
||||
return f"({field} IS NULL OR {field} != %s)", [value]
|
||||
|
||||
|
||||
AssetType = Literal['website', 'endpoint']
|
||||
|
||||
|
||||
class AssetSearchService:
|
||||
"""资产搜索服务"""
|
||||
|
||||
def search(self, query: str) -> List[Dict[str, Any]]:
|
||||
def search(
|
||||
self,
|
||||
query: str,
|
||||
asset_type: AssetType = 'website',
|
||||
limit: Optional[int] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
搜索资产
|
||||
|
||||
Args:
|
||||
query: 搜索查询字符串
|
||||
asset_type: 资产类型 ('website' 或 'endpoint')
|
||||
limit: 最大返回数量(可选)
|
||||
|
||||
Returns:
|
||||
List[Dict]: 搜索结果列表
|
||||
"""
|
||||
where_clause, params = SearchQueryParser.parse(query)
|
||||
|
||||
# 根据资产类型选择视图和字段
|
||||
view_name = VIEW_MAPPING.get(asset_type, 'asset_search_view')
|
||||
select_fields = ENDPOINT_SELECT_FIELDS if asset_type == 'endpoint' else WEBSITE_SELECT_FIELDS
|
||||
|
||||
sql = f"""
|
||||
SELECT
|
||||
id,
|
||||
url,
|
||||
host,
|
||||
title,
|
||||
tech,
|
||||
status_code,
|
||||
response_headers,
|
||||
response_body,
|
||||
target_id
|
||||
FROM asset_search_view
|
||||
SELECT {select_fields}
|
||||
FROM {view_name}
|
||||
WHERE {where_clause}
|
||||
ORDER BY created_at DESC
|
||||
"""
|
||||
|
||||
# 添加 LIMIT
|
||||
if limit is not None and limit > 0:
|
||||
sql += f" LIMIT {int(limit)}"
|
||||
|
||||
try:
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(sql, params)
|
||||
@@ -305,19 +369,23 @@ class AssetSearchService:
|
||||
logger.error(f"搜索查询失败: {e}, SQL: {sql}, params: {params}")
|
||||
raise
|
||||
|
||||
def count(self, query: str) -> int:
|
||||
def count(self, query: str, asset_type: AssetType = 'website') -> int:
|
||||
"""
|
||||
统计搜索结果数量
|
||||
|
||||
Args:
|
||||
query: 搜索查询字符串
|
||||
asset_type: 资产类型 ('website' 或 'endpoint')
|
||||
|
||||
Returns:
|
||||
int: 结果总数
|
||||
"""
|
||||
where_clause, params = SearchQueryParser.parse(query)
|
||||
|
||||
sql = f"SELECT COUNT(*) FROM asset_search_view WHERE {where_clause}"
|
||||
# 根据资产类型选择视图
|
||||
view_name = VIEW_MAPPING.get(asset_type, 'asset_search_view')
|
||||
|
||||
sql = f"SELECT COUNT(*) FROM {view_name} WHERE {where_clause}"
|
||||
|
||||
try:
|
||||
with connection.cursor() as cursor:
|
||||
@@ -326,3 +394,46 @@ class AssetSearchService:
|
||||
except Exception as e:
|
||||
logger.error(f"统计查询失败: {e}")
|
||||
raise
|
||||
|
||||
def search_iter(
|
||||
self,
|
||||
query: str,
|
||||
asset_type: AssetType = 'website',
|
||||
batch_size: int = 1000
|
||||
):
|
||||
"""
|
||||
流式搜索资产(使用服务端游标,内存友好)
|
||||
|
||||
Args:
|
||||
query: 搜索查询字符串
|
||||
asset_type: 资产类型 ('website' 或 'endpoint')
|
||||
batch_size: 每批获取的数量
|
||||
|
||||
Yields:
|
||||
Dict: 单条搜索结果
|
||||
"""
|
||||
where_clause, params = SearchQueryParser.parse(query)
|
||||
|
||||
# 根据资产类型选择视图和字段
|
||||
view_name = VIEW_MAPPING.get(asset_type, 'asset_search_view')
|
||||
select_fields = ENDPOINT_SELECT_FIELDS if asset_type == 'endpoint' else WEBSITE_SELECT_FIELDS
|
||||
|
||||
sql = f"""
|
||||
SELECT {select_fields}
|
||||
FROM {view_name}
|
||||
WHERE {where_clause}
|
||||
ORDER BY created_at DESC
|
||||
"""
|
||||
|
||||
try:
|
||||
# 使用服务端游标,避免一次性加载所有数据到内存
|
||||
with connection.cursor(name='export_cursor') as cursor:
|
||||
cursor.itersize = batch_size
|
||||
cursor.execute(sql, params)
|
||||
columns = [col[0] for col in cursor.description]
|
||||
|
||||
for row in cursor:
|
||||
yield dict(zip(columns, row))
|
||||
except Exception as e:
|
||||
logger.error(f"流式搜索查询失败: {e}, SQL: {sql}, params: {params}")
|
||||
raise
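A minimal way to consume the streaming variant (assuming the app is installed and the IMMVs exist); field names follow the SELECT lists defined at the top of this module:

```python
# Usage sketch: stream endpoint results without loading them all into memory.
from apps.asset.services.search_service import AssetSearchService

service = AssetSearchService()
for row in service.search_iter('tech="nginx" && status=="200"', asset_type="endpoint"):
    print(row["status_code"], row["url"])
```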
|
||||
|
||||
@@ -72,7 +72,7 @@ class EndpointSnapshotsService:
|
||||
'url': 'url',
|
||||
'host': 'host',
|
||||
'title': 'title',
|
||||
'status': 'status_code',
|
||||
'status_code': 'status_code',
|
||||
'webserver': 'webserver',
|
||||
'tech': 'tech',
|
||||
}
|
||||
|
||||
@@ -73,8 +73,8 @@ class WebsiteSnapshotsService:
|
||||
'url': 'url',
|
||||
'host': 'host',
|
||||
'title': 'title',
|
||||
'status': 'status',
|
||||
'webserver': 'web_server',
|
||||
'status_code': 'status_code',
|
||||
'webserver': 'webserver',
|
||||
'tech': 'tech',
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ from .views import (
|
||||
VulnerabilityViewSet,
|
||||
AssetStatisticsViewSet,
|
||||
AssetSearchView,
|
||||
AssetSearchExportView,
|
||||
)
|
||||
|
||||
# 创建 DRF 路由器
|
||||
@@ -27,4 +28,5 @@ router.register(r'statistics', AssetStatisticsViewSet, basename='asset-statistic
|
||||
urlpatterns = [
|
||||
path('assets/', include(router.urls)),
|
||||
path('assets/search/', AssetSearchView.as_view(), name='asset-search'),
|
||||
path('assets/search/export/', AssetSearchExportView.as_view(), name='asset-search-export'),
|
||||
]
|
||||
|
||||
@@ -19,7 +19,7 @@ from .asset_views import (
|
||||
HostPortMappingSnapshotViewSet,
|
||||
VulnerabilitySnapshotViewSet,
|
||||
)
|
||||
from .search_views import AssetSearchView
|
||||
from .search_views import AssetSearchView, AssetSearchExportView
|
||||
|
||||
__all__ = [
|
||||
'AssetStatisticsViewSet',
|
||||
@@ -36,4 +36,5 @@ __all__ = [
|
||||
'HostPortMappingSnapshotViewSet',
|
||||
'VulnerabilitySnapshotViewSet',
|
||||
'AssetSearchView',
|
||||
'AssetSearchExportView',
|
||||
]
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
|
||||
提供资产搜索的 REST API 接口:
|
||||
- GET /api/assets/search/ - 搜索资产
|
||||
- GET /api/assets/search/export/ - 导出搜索结果为 CSV
|
||||
|
||||
搜索语法:
|
||||
- field="value" 模糊匹配(ILIKE %value%)
|
||||
@@ -19,19 +20,25 @@
|
||||
- status: 状态码
|
||||
- body: 响应体
|
||||
- header: 响应头
|
||||
|
||||
支持的资产类型:
|
||||
- website: 站点(默认)
|
||||
- endpoint: 端点
|
||||
"""
|
||||
|
||||
import logging
|
||||
import json
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
from rest_framework import status
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.request import Request
|
||||
from django.http import StreamingHttpResponse
|
||||
from django.db import connection
|
||||
|
||||
from apps.common.response_helpers import success_response, error_response
|
||||
from apps.common.error_codes import ErrorCodes
|
||||
from apps.asset.services.search_service import AssetSearchService
|
||||
from apps.asset.services.search_service import AssetSearchService, VALID_ASSET_TYPES
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -44,12 +51,13 @@ class AssetSearchView(APIView):
|
||||
|
||||
Query Parameters:
|
||||
q: 搜索查询表达式
|
||||
asset_type: 资产类型 ('website' 或 'endpoint',默认 'website')
|
||||
page: 页码(从 1 开始,默认 1)
|
||||
pageSize: 每页数量(默认 10,最大 100)
|
||||
|
||||
示例查询:
|
||||
?q=host="api" && tech="nginx"
|
||||
?q=tech="vue" || tech="react"
|
||||
?q=tech="vue" || tech="react"&asset_type=endpoint
|
||||
?q=status=="200" && host!="test"
|
||||
|
||||
Response:
|
||||
@@ -58,7 +66,8 @@ class AssetSearchView(APIView):
|
||||
"total": 100,
|
||||
"page": 1,
|
||||
"pageSize": 10,
|
||||
"totalPages": 10
|
||||
"totalPages": 10,
|
||||
"assetType": "website"
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -80,20 +89,33 @@ class AssetSearchView(APIView):
|
||||
result[key.strip()] = value.strip()
|
||||
return result
|
||||
|
||||
def _format_result(self, result: dict, vulnerabilities_by_url: dict) -> dict:
|
||||
def _format_result(self, result: dict, vulnerabilities_by_url: dict, asset_type: str) -> dict:
|
||||
"""格式化单个搜索结果"""
|
||||
website_url = result.get('url', '')
|
||||
vulns = vulnerabilities_by_url.get(website_url, [])
|
||||
url = result.get('url', '')
|
||||
vulns = vulnerabilities_by_url.get(url, [])
|
||||
|
||||
return {
|
||||
'url': website_url,
|
||||
# 基础字段(Website 和 Endpoint 共有)
|
||||
formatted = {
|
||||
'id': result.get('id'),
|
||||
'url': url,
|
||||
'host': result.get('host', ''),
|
||||
'title': result.get('title', ''),
|
||||
'technologies': result.get('tech', []) or [],
|
||||
'statusCode': result.get('status_code'),
|
||||
'contentLength': result.get('content_length'),
|
||||
'contentType': result.get('content_type', ''),
|
||||
'webserver': result.get('webserver', ''),
|
||||
'location': result.get('location', ''),
|
||||
'vhost': result.get('vhost'),
|
||||
'responseHeaders': self._parse_headers(result.get('response_headers')),
|
||||
'responseBody': result.get('response_body', ''),
|
||||
'vulnerabilities': [
|
||||
'createdAt': result.get('created_at').isoformat() if result.get('created_at') else None,
|
||||
'targetId': result.get('target_id'),
|
||||
}
|
||||
|
||||
# Website 特有字段:漏洞关联
|
||||
if asset_type == 'website':
|
||||
formatted['vulnerabilities'] = [
|
||||
{
|
||||
'id': v.get('id'),
|
||||
'name': v.get('vuln_type', ''),
|
||||
@@ -101,8 +123,13 @@ class AssetSearchView(APIView):
|
||||
'severity': v.get('severity', 'info'),
|
||||
}
|
||||
for v in vulns
|
||||
],
|
||||
}
|
||||
]
|
||||
|
||||
# Endpoint 特有字段
|
||||
if asset_type == 'endpoint':
|
||||
formatted['matchedGfPatterns'] = result.get('matched_gf_patterns', []) or []
|
||||
|
||||
return formatted
|
||||
|
||||
def _get_vulnerabilities_by_url_prefix(self, website_urls: list) -> dict:
|
||||
"""
|
||||
@@ -199,6 +226,15 @@ class AssetSearchView(APIView):
|
||||
status_code=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# 获取并验证资产类型
|
||||
asset_type = request.query_params.get('asset_type', 'website').strip().lower()
|
||||
if asset_type not in VALID_ASSET_TYPES:
|
||||
return error_response(
|
||||
code=ErrorCodes.VALIDATION_ERROR,
|
||||
message=f'Invalid asset_type. Must be one of: {", ".join(VALID_ASSET_TYPES)}',
|
||||
status_code=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# 获取分页参数
|
||||
try:
|
||||
page = int(request.query_params.get('page', 1))
|
||||
@@ -212,19 +248,21 @@ class AssetSearchView(APIView):
|
||||
page_size = min(max(1, page_size), 100)
|
||||
|
||||
# 获取总数和搜索结果
|
||||
total = self.service.count(query)
|
||||
total = self.service.count(query, asset_type)
|
||||
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
all_results = self.service.search(query)
|
||||
all_results = self.service.search(query, asset_type)
|
||||
results = all_results[offset:offset + page_size]
|
||||
|
||||
# 批量查询漏洞数据(按 URL 前缀匹配)
|
||||
website_urls = [(r.get('url'), r.get('target_id')) for r in results if r.get('url') and r.get('target_id')]
|
||||
vulnerabilities_by_url = self._get_vulnerabilities_by_url_prefix(website_urls) if website_urls else {}
|
||||
# 批量查询漏洞数据(仅 Website 类型需要)
|
||||
vulnerabilities_by_url = {}
|
||||
if asset_type == 'website':
|
||||
website_urls = [(r.get('url'), r.get('target_id')) for r in results if r.get('url') and r.get('target_id')]
|
||||
vulnerabilities_by_url = self._get_vulnerabilities_by_url_prefix(website_urls) if website_urls else {}
|
||||
|
||||
# 格式化结果
|
||||
formatted_results = [self._format_result(r, vulnerabilities_by_url) for r in results]
|
||||
formatted_results = [self._format_result(r, vulnerabilities_by_url, asset_type) for r in results]
|
||||
|
||||
return success_response(data={
|
||||
'results': formatted_results,
|
||||
@@ -232,4 +270,95 @@ class AssetSearchView(APIView):
|
||||
'page': page,
|
||||
'pageSize': page_size,
|
||||
'totalPages': total_pages,
|
||||
'assetType': asset_type,
|
||||
})
|
||||
|
||||
|
||||
class AssetSearchExportView(APIView):
|
||||
"""
|
||||
资产搜索导出 API
|
||||
|
||||
GET /api/assets/search/export/
|
||||
|
||||
Query Parameters:
|
||||
q: 搜索查询表达式
|
||||
asset_type: 资产类型 ('website' 或 'endpoint',默认 'website')
|
||||
|
||||
Response:
|
||||
CSV 文件流(使用服务端游标,支持大数据量导出)
|
||||
"""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.service = AssetSearchService()
|
||||
|
||||
def _get_headers_and_formatters(self, asset_type: str):
|
||||
"""获取 CSV 表头和格式化器"""
|
||||
from apps.common.utils import format_datetime, format_list_field
|
||||
|
||||
if asset_type == 'website':
|
||||
headers = ['url', 'host', 'title', 'status_code', 'content_type', 'content_length',
|
||||
'webserver', 'location', 'tech', 'vhost', 'created_at']
|
||||
else:
|
||||
headers = ['url', 'host', 'title', 'status_code', 'content_type', 'content_length',
|
||||
'webserver', 'location', 'tech', 'matched_gf_patterns', 'vhost', 'created_at']
|
||||
|
||||
formatters = {
|
||||
'created_at': format_datetime,
|
||||
'tech': lambda x: format_list_field(x, separator='; '),
|
||||
'matched_gf_patterns': lambda x: format_list_field(x, separator='; '),
|
||||
'vhost': lambda x: 'true' if x else ('false' if x is False else ''),
|
||||
}
|
||||
|
||||
return headers, formatters
|
||||
|
||||
def get(self, request: Request):
|
||||
"""导出搜索结果为 CSV(流式导出,无数量限制)"""
|
||||
from apps.common.utils import generate_csv_rows
|
||||
|
||||
# 获取搜索查询
|
||||
query = request.query_params.get('q', '').strip()
|
||||
|
||||
if not query:
|
||||
return error_response(
|
||||
code=ErrorCodes.VALIDATION_ERROR,
|
||||
message='Search query (q) is required',
|
||||
status_code=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# 获取并验证资产类型
|
||||
asset_type = request.query_params.get('asset_type', 'website').strip().lower()
|
||||
if asset_type not in VALID_ASSET_TYPES:
|
||||
return error_response(
|
||||
code=ErrorCodes.VALIDATION_ERROR,
|
||||
message=f'Invalid asset_type. Must be one of: {", ".join(VALID_ASSET_TYPES)}',
|
||||
status_code=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
|
||||
# 检查是否有结果(快速检查,避免空导出)
|
||||
total = self.service.count(query, asset_type)
|
||||
if total == 0:
|
||||
return error_response(
|
||||
code=ErrorCodes.NOT_FOUND,
|
||||
message='No results to export',
|
||||
status_code=status.HTTP_404_NOT_FOUND
|
||||
)
|
||||
|
||||
# 获取表头和格式化器
|
||||
headers, formatters = self._get_headers_and_formatters(asset_type)
|
||||
|
||||
# 获取流式数据迭代器
|
||||
data_iterator = self.service.search_iter(query, asset_type)
|
||||
|
||||
# 生成文件名
|
||||
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
|
||||
filename = f'search_{asset_type}_{timestamp}.csv'
|
||||
|
||||
# 返回流式响应
|
||||
response = StreamingHttpResponse(
|
||||
generate_csv_rows(data_iterator, headers, formatters),
|
||||
content_type='text/csv; charset=utf-8'
|
||||
)
|
||||
response['Content-Disposition'] = f'attachment; filename="{filename}"'
|
||||
|
||||
return response
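The CSV streaming itself is delegated to `generate_csv_rows` from `apps.common.utils`, which is not part of this diff. For orientation, a generator compatible with the call above (header row first, then one formatted line per record) could look roughly like this; this is a hypothetical sketch, not the project's actual helper:

```python
# Hypothetical sketch of a generate_csv_rows(data_iterator, headers, formatters)
# compatible generator; the real implementation lives in apps.common.utils.
import csv
import io
from typing import Any, Callable, Dict, Iterable, Iterator, List

def generate_csv_rows(rows: Iterable[Dict[str, Any]],
                      headers: List[str],
                      formatters: Dict[str, Callable[[Any], str]]) -> Iterator[str]:
    buf = io.StringIO()
    writer = csv.writer(buf)

    def flush() -> str:
        chunk = buf.getvalue()
        buf.seek(0)
        buf.truncate(0)
        return chunk

    writer.writerow(headers)          # header row first
    yield flush()
    for row in rows:
        writer.writerow([
            formatters[h](row.get(h)) if h in formatters else row.get(h, "")
            for h in headers
        ])
        yield flush()                 # one CSV line per record, streamed
```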
|
||||
|
||||
@@ -204,14 +204,13 @@ def _run_scans_sequentially(
|
||||
# 流式执行扫描并实时保存结果
|
||||
result = run_and_stream_save_websites_task(
|
||||
cmd=command,
|
||||
tool_name=tool_name, # 新增:工具名称
|
||||
tool_name=tool_name,
|
||||
scan_id=scan_id,
|
||||
target_id=target_id,
|
||||
cwd=str(site_scan_dir),
|
||||
shell=True,
|
||||
batch_size=1000,
|
||||
timeout=timeout,
|
||||
log_file=str(log_file) # 新增:日志文件路径
|
||||
log_file=str(log_file)
|
||||
)
|
||||
|
||||
tool_stats[tool_name] = {
|
||||
|
||||
@@ -212,7 +212,6 @@ def _validate_and_stream_save_urls(
|
||||
target_id=target_id,
|
||||
cwd=str(url_fetch_dir),
|
||||
shell=True,
|
||||
batch_size=500,
|
||||
timeout=timeout,
|
||||
log_file=str(log_file)
|
||||
)
|
||||
|
||||
@@ -341,9 +341,9 @@ def _process_batch(
|
||||
url=record['url'],
|
||||
host=host,
|
||||
title=record.get('title', '') or '',
|
||||
status=record.get('status_code'),
|
||||
status_code=record.get('status_code'),
|
||||
content_length=record.get('content_length'),
|
||||
web_server=record.get('server', '') or '',
|
||||
webserver=record.get('server', '') or '',
|
||||
tech=record.get('techs', []),
|
||||
)
|
||||
snapshot_dtos.append(dto)
|
||||
|
||||
@@ -30,7 +30,6 @@ from typing import Generator, Optional, Dict, Any, TYPE_CHECKING
|
||||
from django.db import IntegrityError, OperationalError, DatabaseError
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
from dateutil.parser import parse as parse_datetime
|
||||
from psycopg2 import InterfaceError
|
||||
|
||||
from apps.asset.dtos.snapshot import WebsiteSnapshotDTO
|
||||
@@ -62,6 +61,18 @@ class ServiceSet:
|
||||
)
|
||||
|
||||
|
||||
def _sanitize_string(value: str) -> str:
|
||||
"""
|
||||
清理字符串中的 NUL 字符和其他不可打印字符
|
||||
|
||||
PostgreSQL 不允许字符串字段包含 NUL (0x00) 字符
|
||||
"""
|
||||
if not value:
|
||||
return value
|
||||
# 移除 NUL 字符
|
||||
return value.replace('\x00', '')
|
||||
|
||||
|
||||
def normalize_url(url: str) -> str:
|
||||
"""
|
||||
标准化 URL,移除默认端口号
|
||||
@@ -117,70 +128,50 @@ def normalize_url(url: str) -> str:
|
||||
return url
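Given the docstring above ("normalize the URL and strip default port numbers"), the intended behaviour can be illustrated with a small standalone sketch; the full body of `normalize_url` is elided from this diff, so treat this as an assumption about its contract rather than its implementation:

```python
# Sketch of default-port stripping with urlparse/urlunparse (illustrative only).
from urllib.parse import urlparse, urlunparse

DEFAULT_PORTS = {"http": 80, "https": 443}

def strip_default_port(url: str) -> str:
    parsed = urlparse(url)
    if parsed.port and DEFAULT_PORTS.get(parsed.scheme) == parsed.port:
        parsed = parsed._replace(netloc=parsed.hostname or "")
    return urlunparse(parsed)

print(strip_default_port("http://example.com:80/path"))     # http://example.com/path
print(strip_default_port("https://example.com:8443/path"))  # unchanged
```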
|
||||
|
||||
|
||||
def _extract_hostname(url: str) -> str:
|
||||
"""
|
||||
从 URL 提取主机名
|
||||
|
||||
Args:
|
||||
url: URL 字符串
|
||||
|
||||
Returns:
|
||||
str: 提取的主机名(小写)
|
||||
"""
|
||||
try:
|
||||
if url:
|
||||
parsed = urlparse(url)
|
||||
if parsed.hostname:
|
||||
return parsed.hostname
|
||||
# 降级方案:手动提取
|
||||
return url.replace('http://', '').replace('https://', '').split('/')[0].split(':')[0]
|
||||
return ''
|
||||
except Exception as e:
|
||||
logger.debug("提取主机名失败: %s", e)
|
||||
return ''
|
||||
|
||||
|
||||
class HttpxRecord:
|
||||
"""httpx 扫描记录数据类"""
|
||||
|
||||
def __init__(self, data: Dict[str, Any]):
|
||||
self.url = data.get('url', '')
|
||||
self.input = data.get('input', '')
|
||||
self.title = data.get('title', '')
|
||||
self.status_code = data.get('status_code')
|
||||
self.content_length = data.get('content_length')
|
||||
self.content_type = data.get('content_type', '')
|
||||
self.location = data.get('location', '')
|
||||
self.webserver = data.get('webserver', '')
|
||||
self.response_body = data.get('body', '') # 从 body 字段获取完整响应体
|
||||
self.tech = data.get('tech', [])
|
||||
self.vhost = data.get('vhost')
|
||||
self.failed = data.get('failed', False)
|
||||
self.timestamp = data.get('timestamp')
|
||||
self.response_headers = data.get('raw_header', '') # 从 raw_header 字段获取原始响应头字符串
|
||||
self.url = _sanitize_string(data.get('url', ''))
|
||||
self.input = _sanitize_string(data.get('input', ''))
|
||||
self.title = _sanitize_string(data.get('title', ''))
|
||||
self.status_code = data.get('status_code') # int,不需要清理
|
||||
self.content_length = data.get('content_length') # int,不需要清理
|
||||
self.content_type = _sanitize_string(data.get('content_type', ''))
|
||||
self.location = _sanitize_string(data.get('location', ''))
|
||||
self.webserver = _sanitize_string(data.get('webserver', ''))
|
||||
self.response_body = _sanitize_string(data.get('body', ''))
|
||||
self.tech = [_sanitize_string(t) for t in data.get('tech', []) if isinstance(t, str)] # 列表中的字符串也需要清理
|
||||
self.vhost = data.get('vhost') # bool,不需要清理
|
||||
self.failed = data.get('failed', False) # bool,不需要清理
|
||||
self.response_headers = _sanitize_string(data.get('raw_header', ''))
|
||||
|
||||
# 从 URL 中提取主机名
|
||||
self.host = self._extract_hostname()
|
||||
|
||||
def _extract_hostname(self) -> str:
|
||||
"""
|
||||
从 URL 或 input 字段提取主机名
|
||||
|
||||
优先级:
|
||||
1. 使用 urlparse 解析 URL 获取 hostname
|
||||
2. 从 input 字段提取(处理可能包含协议的情况)
|
||||
3. 从 URL 字段手动提取(降级方案)
|
||||
|
||||
Returns:
|
||||
str: 提取的主机名(小写)
|
||||
"""
|
||||
try:
|
||||
# 方法 1: 使用 urlparse 解析 URL
|
||||
if self.url:
|
||||
parsed = urlparse(self.url)
|
||||
if parsed.hostname:
|
||||
return parsed.hostname
|
||||
|
||||
# 方法 2: 从 input 字段提取
|
||||
if self.input:
|
||||
host = self.input.strip().lower()
|
||||
# 移除协议前缀
|
||||
if host.startswith(('http://', 'https://')):
|
||||
host = host.split('//', 1)[1].split('/')[0]
|
||||
return host
|
||||
|
||||
# 方法 3: 从 URL 手动提取(降级方案)
|
||||
if self.url:
|
||||
return self.url.replace('http://', '').replace('https://', '').split('/')[0]
|
||||
|
||||
# 兜底:返回空字符串
|
||||
return ''
|
||||
|
||||
except Exception as e:
|
||||
# 异常处理:尽力从 input 或 URL 提取
|
||||
logger.debug("提取主机名失败: %s,使用降级方案", e)
|
||||
if self.input:
|
||||
return self.input.strip().lower()
|
||||
if self.url:
|
||||
return self.url.replace('http://', '').replace('https://', '').split('/')[0]
|
||||
return ''
|
||||
# 从 URL 中提取主机名(优先使用 httpx 返回的 host,否则自动提取)
|
||||
httpx_host = _sanitize_string(data.get('host', ''))
|
||||
self.host = httpx_host if httpx_host else _extract_hostname(self.url)
|
||||
|
||||
|
||||
def _save_batch_with_retry(
|
||||
@@ -228,39 +219,31 @@ def _save_batch_with_retry(
|
||||
}
|
||||
|
||||
except (OperationalError, DatabaseError, InterfaceError) as e:
|
||||
# 数据库连接/操作错误,可重试
|
||||
# 数据库级错误(连接中断、表结构不匹配等):按指数退避重试,最终失败时抛出异常让 Flow 失败
|
||||
if attempt < max_retries - 1:
|
||||
wait_time = 2 ** attempt # 指数退避: 1s, 2s, 4s
|
||||
wait_time = 2 ** attempt
|
||||
logger.warning(
|
||||
"批次 %d 保存失败(第 %d 次尝试),%d秒后重试: %s",
|
||||
batch_num, attempt + 1, wait_time, str(e)[:100]
|
||||
)
|
||||
time.sleep(wait_time)
|
||||
else:
|
||||
logger.error("批次 %d 保存失败(已重试 %d 次): %s", batch_num, max_retries, e)
|
||||
return {
|
||||
'success': False,
|
||||
'created_websites': 0,
|
||||
'skipped_failed': 0
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
# 其他未知错误 - 检查是否为连接问题
|
||||
error_str = str(e).lower()
|
||||
if 'connection' in error_str and attempt < max_retries - 1:
|
||||
logger.warning(
|
||||
"批次 %d 连接相关错误(尝试 %d/%d): %s,Repository 装饰器会自动重连",
|
||||
batch_num, attempt + 1, max_retries, str(e)
|
||||
logger.error(
|
||||
"批次 %d 保存失败(已重试 %d 次),将终止任务: %s",
|
||||
batch_num,
|
||||
max_retries,
|
||||
e,
|
||||
exc_info=True,
|
||||
)
|
||||
time.sleep(2)
|
||||
else:
|
||||
logger.error("批次 %d 未知错误: %s", batch_num, e, exc_info=True)
|
||||
return {
|
||||
'success': False,
|
||||
'created_websites': 0,
|
||||
'skipped_failed': 0
|
||||
}
|
||||
|
||||
# 让上层 Task 感知失败,从而标记整个扫描为失败
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
# 其他未知异常也不再吞掉,直接抛出以便 Flow 标记为失败
|
||||
logger.error("批次 %d 未知错误: %s", batch_num, e, exc_info=True)
|
||||
raise
|
||||
|
||||
# 理论上不会走到这里,保留兜底返回值以满足类型约束
|
||||
return {
|
||||
'success': False,
|
||||
'created_websites': 0,
|
||||
@@ -328,43 +311,39 @@ def _save_batch(
|
||||
skipped_failed += 1
|
||||
continue
|
||||
|
||||
# 解析时间戳
|
||||
created_at = None
|
||||
if hasattr(record, 'timestamp') and record.timestamp:
|
||||
try:
|
||||
created_at = parse_datetime(record.timestamp)
|
||||
except (ValueError, TypeError) as e:
|
||||
logger.warning(f"无法解析时间戳 {record.timestamp}: {e}")
|
||||
|
||||
# 使用 input 字段(原始扫描的 URL)而不是 url 字段(重定向后的 URL)
|
||||
# 原因:避免多个不同的输入 URL 重定向到同一个 URL 时产生唯一约束冲突
|
||||
# 例如:http://example.com 和 https://example.com 都重定向到 https://example.com
|
||||
# 如果使用 record.url,两条记录会有相同的 url,导致数据库冲突
|
||||
# 如果使用 record.input,两条记录保留原始输入,不会冲突
|
||||
normalized_url = normalize_url(record.input)
|
||||
|
||||
# 提取 host 字段(域名或IP地址)
|
||||
host = record.host if record.host else ''
|
||||
|
||||
# 创建 WebsiteSnapshot DTO
|
||||
snapshot_dto = WebsiteSnapshotDTO(
|
||||
scan_id=scan_id,
|
||||
target_id=target_id, # 主关联字段
|
||||
url=normalized_url, # 保存原始输入 URL(归一化后)
|
||||
host=host, # 主机名(域名或IP地址)
|
||||
location=record.location, # location 字段保存重定向信息
|
||||
title=record.title[:1000] if record.title else '',
|
||||
web_server=record.webserver[:200] if record.webserver else '',
|
||||
response_body=record.response_body if record.response_body else '',
|
||||
content_type=record.content_type[:200] if record.content_type else '',
|
||||
tech=record.tech if isinstance(record.tech, list) else [],
|
||||
status=record.status_code,
|
||||
content_length=record.content_length,
|
||||
vhost=record.vhost,
|
||||
response_headers=record.response_headers if record.response_headers else '',
|
||||
)
|
||||
|
||||
snapshot_items.append(snapshot_dto)
|
||||
try:
|
||||
# 使用 input 字段(原始扫描的 URL)而不是 url 字段(重定向后的 URL)
|
||||
# 原因:避免多个不同的输入 URL 重定向到同一个 URL 时产生唯一约束冲突
|
||||
# 例如:http://example.com 和 https://example.com 都重定向到 https://example.com
|
||||
# 如果使用 record.url,两条记录会有相同的 url,导致数据库冲突
|
||||
# 如果使用 record.input,两条记录保留原始输入,不会冲突
|
||||
normalized_url = normalize_url(record.input) if record.input else normalize_url(record.url)
|
||||
|
||||
# 提取 host 字段(域名或IP地址)
|
||||
host = record.host if record.host else ''
|
||||
|
||||
# 创建 WebsiteSnapshot DTO
|
||||
snapshot_dto = WebsiteSnapshotDTO(
|
||||
scan_id=scan_id,
|
||||
target_id=target_id, # 主关联字段
|
||||
url=normalized_url, # 保存原始输入 URL(归一化后)
|
||||
host=host, # 主机名(域名或IP地址)
|
||||
location=record.location if record.location else '',
|
||||
title=record.title if record.title else '',
|
||||
webserver=record.webserver if record.webserver else '',
|
||||
response_body=record.response_body if record.response_body else '',
|
||||
content_type=record.content_type if record.content_type else '',
|
||||
tech=record.tech if isinstance(record.tech, list) else [],
|
||||
status_code=record.status_code,
|
||||
content_length=record.content_length,
|
||||
vhost=record.vhost,
|
||||
response_headers=record.response_headers if record.response_headers else '',
|
||||
)
|
||||
|
||||
snapshot_items.append(snapshot_dto)
|
||||
except Exception as e:
|
||||
logger.error("处理记录失败: %s,错误: %s", record.url, e)
|
||||
continue
|
||||
|
||||
# ========== Step 3: 保存快照并同步到资产表(通过快照 Service)==========
|
||||
if snapshot_items:
|
||||
@@ -386,28 +365,31 @@ def _parse_and_validate_line(line: str) -> Optional[HttpxRecord]:
|
||||
Optional[HttpxRecord]: 有效的 httpx 扫描记录,或 None 如果验证失败
|
||||
|
||||
验证步骤:
|
||||
1. 解析 JSON 格式
|
||||
2. 验证数据类型为字典
|
||||
3. 创建 HttpxRecord 对象
|
||||
4. 验证必要字段(url)
|
||||
1. 清理 NUL 字符
|
||||
2. 解析 JSON 格式
|
||||
3. 验证数据类型为字典
|
||||
4. 创建 HttpxRecord 对象
|
||||
5. 验证必要字段(url)
|
||||
"""
|
||||
try:
|
||||
# 步骤 1: 解析 JSON
|
||||
# 步骤 1: 清理 NUL 字符后再解析 JSON
|
||||
line = _sanitize_string(line)
|
||||
|
||||
# 步骤 2: 解析 JSON
|
||||
try:
|
||||
line_data = json.loads(line, strict=False)
|
||||
except json.JSONDecodeError:
|
||||
# logger.info("跳过非 JSON 行: %s", line)
|
||||
return None
|
||||
|
||||
# 步骤 2: 验证数据类型
|
||||
# 步骤 3: 验证数据类型
|
||||
if not isinstance(line_data, dict):
|
||||
logger.info("跳过非字典数据")
|
||||
return None
|
||||
|
||||
# 步骤 3: 创建记录
|
||||
# 步骤 4: 创建记录
|
||||
record = HttpxRecord(line_data)
|
||||
|
||||
# 步骤 4: 验证必要字段
|
||||
# 步骤 5: 验证必要字段
|
||||
if not record.url:
|
||||
logger.info("URL 为空,跳过 - 数据: %s", str(line_data)[:200])
|
||||
return None
|
||||
@@ -416,7 +398,7 @@ def _parse_and_validate_line(line: str) -> Optional[HttpxRecord]:
|
||||
return record
|
||||
|
||||
except Exception:
|
||||
logger.info("跳过无法解析的行: %s", line[:100])
|
||||
logger.info("跳过无法解析的行: %s", line[:100] if line else 'empty')
|
||||
return None
|
||||
|
||||
|
||||
@@ -464,8 +446,8 @@ def _parse_httpx_stream_output(
|
||||
# yield 一条有效记录
|
||||
yield record
|
||||
|
||||
# 每处理 1000 条记录输出一次进度
|
||||
if valid_records % 1000 == 0:
|
||||
# 每处理 5 条记录输出一次进度
|
||||
if valid_records % 5 == 0:
|
||||
logger.info("已解析 %d 条有效记录...", valid_records)
|
||||
|
||||
except subprocess.TimeoutExpired as e:
|
||||
@@ -604,8 +586,8 @@ def _process_records_in_batches(
|
||||
_process_batch(batch, scan_id, target_id, batch_num, total_stats, failed_batches, services)
|
||||
batch = [] # 清空批次
|
||||
|
||||
# 每20个批次输出进度
|
||||
if batch_num % 20 == 0:
|
||||
# 每 2 个批次输出进度
|
||||
if batch_num % 2 == 0:
|
||||
logger.info("进度: 已处理 %d 批次,%d 条记录", batch_num, total_records)
|
||||
|
||||
# 保存最后一批
|
||||
@@ -676,11 +658,7 @@ def _cleanup_resources(data_generator) -> None:
|
||||
logger.error("关闭生成器时出错: %s", gen_close_error)
|
||||
|
||||
|
||||
@task(
|
||||
name='run_and_stream_save_websites',
|
||||
retries=0,
|
||||
log_prints=True
|
||||
)
|
||||
@task(name='run_and_stream_save_websites', retries=0)
|
||||
def run_and_stream_save_websites_task(
|
||||
cmd: str,
|
||||
tool_name: str,
|
||||
@@ -688,7 +666,7 @@ def run_and_stream_save_websites_task(
|
||||
target_id: int,
|
||||
cwd: Optional[str] = None,
|
||||
shell: bool = False,
|
||||
batch_size: int = 1000,
|
||||
batch_size: int = 10,
|
||||
timeout: Optional[int] = None,
|
||||
log_file: Optional[str] = None
|
||||
) -> dict:
|
||||
|
||||
@@ -23,10 +23,11 @@ import subprocess
|
||||
import time
|
||||
from pathlib import Path
|
||||
from prefect import task
|
||||
from typing import Generator, Optional
|
||||
from typing import Generator, Optional, Dict, Any
|
||||
from django.db import IntegrityError, OperationalError, DatabaseError
|
||||
from psycopg2 import InterfaceError
|
||||
from dataclasses import dataclass
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from apps.asset.services.snapshot import EndpointSnapshotsService
|
||||
from apps.scan.utils import execute_stream
|
||||
@@ -63,7 +64,53 @@ def _sanitize_string(value: str) -> str:
return value.replace('\x00', '')

def _parse_and_validate_line(line: str) -> Optional[dict]:
def _extract_hostname(url: str) -> str:
"""
从 URL 提取主机名

Args:
url: URL 字符串

Returns:
str: 提取的主机名(小写)
"""
try:
if url:
parsed = urlparse(url)
if parsed.hostname:
return parsed.hostname
# 降级方案:手动提取
return url.replace('http://', '').replace('https://', '').split('/')[0].split(':')[0]
return ''
except Exception as e:
logger.debug("提取主机名失败: %s", e)
return ''

class HttpxRecord:
"""httpx 扫描记录数据类"""

def __init__(self, data: Dict[str, Any]):
self.url = _sanitize_string(data.get('url', ''))
self.input = _sanitize_string(data.get('input', ''))
self.title = _sanitize_string(data.get('title', ''))
self.status_code = data.get('status_code')  # int,不需要清理
self.content_length = data.get('content_length')  # int,不需要清理
self.content_type = _sanitize_string(data.get('content_type', ''))
self.location = _sanitize_string(data.get('location', ''))
self.webserver = _sanitize_string(data.get('webserver', ''))
self.response_body = _sanitize_string(data.get('body', ''))
self.tech = [_sanitize_string(t) for t in data.get('tech', []) if isinstance(t, str)]  # 列表中的字符串也需要清理
self.vhost = data.get('vhost')  # bool,不需要清理
self.failed = data.get('failed', False)  # bool,不需要清理
self.response_headers = _sanitize_string(data.get('raw_header', ''))

# 从 URL 中提取主机名(优先使用 httpx 返回的 host,否则自动提取)
httpx_host = _sanitize_string(data.get('host', ''))
self.host = httpx_host if httpx_host else _extract_hostname(self.url)

def _parse_and_validate_line(line: str) -> Optional[HttpxRecord]:
"""
解析并验证单行 httpx JSON 输出

@@ -71,9 +118,7 @@ def _parse_and_validate_line(line: str) -> Optional[dict]:
line: 单行输出数据

Returns:
Optional[dict]: 有效的 httpx 记录,或 None 如果验证失败

保存所有有效 URL(不再过滤状态码,安全扫描中 403/404/500 等也有分析价值)
Optional[HttpxRecord]: 有效的 httpx 记录,或 None 如果验证失败
"""
try:
# 清理 NUL 字符后再解析 JSON
@@ -83,7 +128,6 @@ def _parse_and_validate_line(line: str) -> Optional[dict]:
try:
line_data = json.loads(line, strict=False)
except json.JSONDecodeError:
# logger.info("跳过非 JSON 行: %s", line)
return None

# 验证数据类型
@@ -91,29 +135,15 @@ def _parse_and_validate_line(line: str) -> Optional[dict]:
logger.info("跳过非字典数据")
return None

# 获取必要字段
url = line_data.get('url', '').strip()
status_code = line_data.get('status_code')
# 创建记录
record = HttpxRecord(line_data)

if not url:
# 验证必要字段
if not record.url:
logger.info("URL 为空,跳过 - 数据: %s", str(line_data)[:200])
return None

# 保存所有有效 URL(不再过滤状态码)
return {
'url': _sanitize_string(url),
'host': _sanitize_string(line_data.get('host', '')),
'status_code': status_code,
'title': _sanitize_string(line_data.get('title', '')),
'content_length': line_data.get('content_length', 0),
'content_type': _sanitize_string(line_data.get('content_type', '')),
'webserver': _sanitize_string(line_data.get('webserver', '')),
'location': _sanitize_string(line_data.get('location', '')),
'tech': line_data.get('tech', []),
'response_body': _sanitize_string(line_data.get('body', '')),
'vhost': line_data.get('vhost', False),
'response_headers': _sanitize_string(line_data.get('raw_header', '')),
}
return record

except Exception:
logger.info("跳过无法解析的行: %s", line[:100] if line else 'empty')
@@ -127,7 +157,7 @@ def _parse_httpx_stream_output(
shell: bool = False,
timeout: Optional[int] = None,
log_file: Optional[str] = None
) -> Generator[dict, None, None]:
) -> Generator[HttpxRecord, None, None]:
"""
流式解析 httpx 命令输出

@@ -140,7 +170,7 @@ def _parse_httpx_stream_output(
log_file: 日志文件路径

Yields:
dict: 每次 yield 一条存活的 URL 记录
HttpxRecord: 每次 yield 一条存活的 URL 记录
"""
logger.info("开始流式解析 httpx 输出 - 命令: %s", cmd)

@@ -170,8 +200,8 @@ def _parse_httpx_stream_output(
# yield 一条有效记录(存活的 URL)
yield record

# 每处理 500 条记录输出一次进度
if valid_records % 500 == 0:
# 每处理 100 条记录输出一次进度
if valid_records % 100 == 0:
logger.info("已解析 %d 条存活的 URL...", valid_records)

except subprocess.TimeoutExpired as e:

@@ -188,6 +218,78 @@ def _parse_httpx_stream_output(
)

def _validate_task_parameters(cmd: str, target_id: int, scan_id: int, cwd: Optional[str]) -> None:
"""
验证任务参数的有效性

Args:
cmd: 扫描命令
target_id: 目标ID
scan_id: 扫描ID
cwd: 工作目录

Raises:
ValueError: 参数验证失败
"""
if not cmd or not cmd.strip():
raise ValueError("扫描命令不能为空")

if target_id is None:
raise ValueError("target_id 不能为 None,必须指定目标ID")

if scan_id is None:
raise ValueError("scan_id 不能为 None,必须指定扫描ID")

# 验证工作目录(如果指定)
if cwd and not Path(cwd).exists():
raise ValueError(f"工作目录不存在: {cwd}")

def _build_final_result(stats: dict) -> dict:
"""
构建最终结果并输出日志

Args:
stats: 处理统计信息

Returns:
dict: 最终结果
"""
logger.info(
"✓ URL 验证任务完成 - 处理记录: %d(%d 批次),创建端点: %d,跳过(失败): %d",
stats['processed_records'], stats['batch_count'], stats['created_endpoints'],
stats['skipped_failed']
)

# 如果没有创建任何记录,给出明确提示
if stats['created_endpoints'] == 0:
logger.warning(
"⚠️ 没有创建任何端点记录!可能原因:1) 命令输出格式问题 2) 重复数据被忽略 3) 所有请求都失败"
)

return {
'processed_records': stats['processed_records'],
'created_endpoints': stats['created_endpoints'],
'skipped_failed': stats['skipped_failed']
}

def _cleanup_resources(data_generator) -> None:
"""
清理任务资源

Args:
data_generator: 数据生成器
"""
# 确保生成器被正确关闭
if data_generator is not None:
try:
data_generator.close()
logger.debug("已关闭数据生成器")
except Exception as gen_close_error:
logger.error("关闭生成器时出错: %s", gen_close_error)
def _save_batch_with_retry(
batch: list,
scan_id: int,
@@ -208,14 +310,19 @@ def _save_batch_with_retry(
max_retries: 最大重试次数

Returns:
dict: {'success': bool, 'saved_count': int}
dict: {
'success': bool,
'created_endpoints': int,
'skipped_failed': int
}
"""
for attempt in range(max_retries):
try:
count = _save_batch(batch, scan_id, target_id, batch_num, services)
stats = _save_batch(batch, scan_id, target_id, batch_num, services)
return {
'success': True,
'saved_count': count
'created_endpoints': stats.get('created_endpoints', 0),
'skipped_failed': stats.get('skipped_failed', 0)
}

except IntegrityError as e:
@@ -223,7 +330,8 @@ def _save_batch_with_retry(
logger.error("批次 %d 数据完整性错误,跳过: %s", batch_num, str(e)[:100])
return {
'success': False,
'saved_count': 0
'created_endpoints': 0,
'skipped_failed': 0
}

except (OperationalError, DatabaseError, InterfaceError) as e:
@@ -254,7 +362,8 @@ def _save_batch_with_retry(
# 理论上不会走到这里,保留兜底返回值以满足类型约束
return {
'success': False,
'saved_count': 0
'created_endpoints': 0,
'skipped_failed': 0
}

@@ -264,50 +373,72 @@ def _save_batch(
target_id: int,
batch_num: int,
services: ServiceSet
) -> int:
) -> dict:
"""
保存一个批次的数据到数据库

Args:
batch: 数据批次,list of dict
batch: 数据批次,list of HttpxRecord
scan_id: 扫描任务 ID
target_id: 目标 ID
batch_num: 批次编号
services: Service 集合

Returns:
int: 创建的记录数
dict: 包含创建和跳过记录的统计信息
"""
# 参数验证
if not isinstance(batch, list):
raise TypeError(f"batch 必须是 list 类型,实际: {type(batch).__name__}")

if not batch:
logger.debug("批次 %d 为空,跳过处理", batch_num)
return 0
return {
'created_endpoints': 0,
'skipped_failed': 0
}

# 统计变量
skipped_failed = 0

# 批量构造 Endpoint 快照 DTO
from apps.asset.dtos.snapshot import EndpointSnapshotDTO

snapshots = []
for record in batch:
# 跳过失败的请求
if record.failed:
skipped_failed += 1
continue

try:
# Endpoint URL 直接使用原始值,不做标准化
# 原因:Endpoint URL 来自 waymore/katana,包含路径和参数,标准化可能改变含义
url = record.input if record.input else record.url

# 提取 host 字段(域名或IP地址)
host = record.host if record.host else ''

dto = EndpointSnapshotDTO(
scan_id=scan_id,
url=record['url'],
host=record.get('host', ''),
title=record.get('title', ''),
status_code=record.get('status_code'),
content_length=record.get('content_length', 0),
location=record.get('location', ''),
webserver=record.get('webserver', ''),
content_type=record.get('content_type', ''),
tech=record.get('tech', []),
response_body=record.get('response_body', ''),
vhost=record.get('vhost', False),
matched_gf_patterns=[],
target_id=target_id,
response_headers=record.get('response_headers', ''),
url=url,
host=host,
title=record.title if record.title else '',
status_code=record.status_code,
content_length=record.content_length,
location=record.location if record.location else '',
webserver=record.webserver if record.webserver else '',
content_type=record.content_type if record.content_type else '',
tech=record.tech if isinstance(record.tech, list) else [],
response_body=record.response_body if record.response_body else '',
vhost=record.vhost if record.vhost else False,
matched_gf_patterns=[],
response_headers=record.response_headers if record.response_headers else '',
)
snapshots.append(dto)
except Exception as e:
logger.error("处理记录失败: %s,错误: %s", record.get('url', 'Unknown'), e)
logger.error("处理记录失败: %s,错误: %s", record.url, e)
continue

if snapshots:
@@ -316,15 +447,69 @@ def _save_batch(
services.snapshot.save_and_sync(snapshots)
count = len(snapshots)
logger.info(
"批次 %d: 保存了 %d 个存活的 URL(共 %d 个)",
batch_num, count, len(batch)
"批次 %d: 保存了 %d 个存活的 URL(共 %d 个,跳过失败: %d)",
batch_num, count, len(batch), skipped_failed
)
return count
return {
'created_endpoints': count,
'skipped_failed': skipped_failed
}
except Exception as e:
logger.error("批次 %d 批量保存失败: %s", batch_num, e)
raise

return 0
return {
'created_endpoints': 0,
'skipped_failed': skipped_failed
}
def _accumulate_batch_stats(total_stats: dict, batch_result: dict) -> None:
"""
累加批次统计信息

Args:
total_stats: 总统计信息字典
batch_result: 批次结果字典
"""
total_stats['created_endpoints'] += batch_result.get('created_endpoints', 0)
total_stats['skipped_failed'] += batch_result.get('skipped_failed', 0)

def _process_batch(
batch: list,
scan_id: int,
target_id: int,
batch_num: int,
total_stats: dict,
failed_batches: list,
services: ServiceSet
) -> None:
"""
处理单个批次

Args:
batch: 数据批次
scan_id: 扫描ID
target_id: 目标ID
batch_num: 批次编号
total_stats: 总统计信息
failed_batches: 失败批次列表
services: Service 集合(必须,依赖注入)
"""
result = _save_batch_with_retry(
batch, scan_id, target_id, batch_num, services
)

# 累计统计信息(失败时可能有部分数据已保存)
_accumulate_batch_stats(total_stats, result)

if not result['success']:
failed_batches.append(batch_num)
logger.warning(
"批次 %d 保存失败,但已累计统计信息:创建端点=%d",
batch_num, result.get('created_endpoints', 0)
)

def _process_records_in_batches(
@@ -335,7 +520,7 @@ def _process_records_in_batches(
services: ServiceSet
) -> dict:
"""
分批处理记录并保存到数据库
流式处理记录并分批保存

Args:
data_generator: 数据生成器
@@ -345,14 +530,23 @@ def _process_records_in_batches(
services: Service 集合

Returns:
dict: 处理统计结果
dict: 处理统计信息

Raises:
RuntimeError: 存在失败批次时抛出
"""
batch = []
batch_num = 0
total_records = 0
total_saved = 0
batch_num = 0
failed_batches = []
batch = []

# 统计信息
total_stats = {
'created_endpoints': 0,
'skipped_failed': 0
}

# 流式读取生成器并分批保存
for record in data_generator:
batch.append(record)
total_records += 1
@@ -360,46 +554,35 @@ def _process_records_in_batches(
# 达到批次大小,执行保存
if len(batch) >= batch_size:
batch_num += 1
result = _save_batch_with_retry(
batch, scan_id, target_id, batch_num, services
)

if result['success']:
total_saved += result['saved_count']
else:
failed_batches.append(batch_num)

_process_batch(batch, scan_id, target_id, batch_num, total_stats, failed_batches, services)
batch = []  # 清空批次

# 每 10 个批次输出进度
if batch_num % 10 == 0:
logger.info(
"进度: 已处理 %d 批次,%d 条记录,保存 %d 条",
batch_num, total_records, total_saved
)
logger.info("进度: 已处理 %d 批次,%d 条记录", batch_num, total_records)

# 保存最后一批
if batch:
batch_num += 1
result = _save_batch_with_retry(
batch, scan_id, target_id, batch_num, services
_process_batch(batch, scan_id, target_id, batch_num, total_stats, failed_batches, services)

# 检查失败批次
if failed_batches:
error_msg = (
f"流式保存 URL 验证结果时出现失败批次,处理记录: {total_records},"
f"失败批次: {failed_batches}"
)

if result['success']:
total_saved += result['saved_count']
else:
failed_batches.append(batch_num)
logger.warning(error_msg)
raise RuntimeError(error_msg)

return {
'processed_records': total_records,
'saved_urls': total_saved,
'failed_urls': total_records - total_saved,
'batch_count': batch_num,
'failed_batches': failed_batches
**total_stats
}
@task(name="run_and_stream_save_urls", retries=3, retry_delay_seconds=10)
|
||||
@task(name="run_and_stream_save_urls", retries=0)
|
||||
def run_and_stream_save_urls_task(
|
||||
cmd: str,
|
||||
tool_name: str,
|
||||
@@ -407,7 +590,7 @@ def run_and_stream_save_urls_task(
|
||||
target_id: int,
|
||||
cwd: Optional[str] = None,
|
||||
shell: bool = False,
|
||||
batch_size: int = 500,
|
||||
batch_size: int = 100,
|
||||
timeout: Optional[int] = None,
|
||||
log_file: Optional[str] = None
|
||||
) -> dict:
|
||||
@@ -415,17 +598,18 @@ def run_and_stream_save_urls_task(
|
||||
执行 httpx 验证并流式保存存活的 URL
|
||||
|
||||
该任务将:
|
||||
1. 执行 httpx 命令验证 URL 存活
|
||||
2. 流式处理输出,实时解析
|
||||
3. 批量保存存活的 URL 到 Endpoint 表
|
||||
1. 验证输入参数
|
||||
2. 初始化资源(缓存、生成器)
|
||||
3. 流式处理记录并分批保存
|
||||
4. 构建并返回结果统计
|
||||
|
||||
Args:
|
||||
cmd: httpx 命令
|
||||
tool_name: 工具名称('httpx')
|
||||
scan_id: 扫描任务 ID
|
||||
target_id: 目标 ID
|
||||
cwd: 工作目录
|
||||
shell: 是否使用 shell 执行
|
||||
cwd: 工作目录(可选)
|
||||
shell: 是否使用 shell 执行(默认 False)
|
||||
batch_size: 批次大小(默认 500)
|
||||
timeout: 超时时间(秒)
|
||||
log_file: 日志文件路径
|
||||
@@ -433,11 +617,14 @@ def run_and_stream_save_urls_task(
|
||||
Returns:
|
||||
dict: {
|
||||
'processed_records': int, # 处理的记录总数
|
||||
'saved_urls': int, # 保存的存活 URL 数
|
||||
'failed_urls': int, # 失败/死链数
|
||||
'batch_count': int, # 批次数
|
||||
'failed_batches': list # 失败的批次号
|
||||
'created_endpoints': int, # 创建的端点记录数
|
||||
'skipped_failed': int, # 因请求失败跳过的记录数
|
||||
}
|
||||
|
||||
Raises:
|
||||
ValueError: 参数验证失败
|
||||
RuntimeError: 命令执行或数据库操作失败
|
||||
subprocess.TimeoutExpired: 命令执行超时
|
||||
"""
|
||||
logger.info(
|
||||
"开始执行流式 URL 验证任务 - target_id=%s, 超时=%s秒, 命令: %s",
|
||||
@@ -447,33 +634,30 @@ def run_and_stream_save_urls_task(
|
||||
data_generator = None
|
||||
|
||||
try:
|
||||
# 1. 初始化资源
|
||||
# 1. 验证参数
|
||||
_validate_task_parameters(cmd, target_id, scan_id, cwd)
|
||||
|
||||
# 2. 初始化资源
|
||||
data_generator = _parse_httpx_stream_output(
|
||||
cmd, tool_name, cwd, shell, timeout, log_file
|
||||
)
|
||||
services = ServiceSet.create_default()
|
||||
|
||||
# 2. 流式处理记录并分批保存
|
||||
# 3. 流式处理记录并分批保存
|
||||
stats = _process_records_in_batches(
|
||||
data_generator, scan_id, target_id, batch_size, services
|
||||
)
|
||||
|
||||
# 3. 输出最终统计
|
||||
logger.info(
|
||||
"✓ URL 验证任务完成 - 处理: %d, 存活: %d, 失败: %d",
|
||||
stats['processed_records'],
|
||||
stats['saved_urls'],
|
||||
stats['failed_urls']
|
||||
)
|
||||
|
||||
return stats
|
||||
# 4. 构建最终结果
|
||||
return _build_final_result(stats)
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
# 超时异常直接向上传播,保留异常类型
|
||||
logger.warning(
|
||||
"⚠️ URL 验证任务超时 - target_id=%s, 超时=%s秒",
|
||||
target_id, timeout
|
||||
)
|
||||
raise
|
||||
raise # 直接重新抛出,不包装
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"流式执行 URL 验证任务失败: {e}"
|
||||
@@ -481,12 +665,5 @@ def run_and_stream_save_urls_task(
|
||||
raise RuntimeError(error_msg) from e
|
||||
|
||||
finally:
|
||||
# 清理资源
|
||||
if data_generator is not None:
|
||||
try:
|
||||
# 确保生成器被正确关闭
|
||||
data_generator.close()
|
||||
except (GeneratorExit, StopIteration):
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.warning("关闭数据生成器时出错: %s", e)
|
||||
# 5. 清理资源
|
||||
_cleanup_resources(data_generator)
|
||||
|
||||
@@ -1516,7 +1516,7 @@ class TestDataGenerator:
|
||||
if batch_data:
|
||||
execute_values(cur, """
|
||||
INSERT INTO website_snapshot (
|
||||
scan_id, url, host, title, web_server, tech, status,
|
||||
scan_id, url, host, title, webserver, tech, status_code,
|
||||
content_length, content_type, location, response_body,
|
||||
response_headers, created_at
|
||||
) VALUES %s
|
||||
|
||||
@@ -27,10 +27,50 @@ BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'

log_info() { echo -e "${BLUE}[XingRin]${NC} $1"; }
log_success() { echo -e "${GREEN}[XingRin]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[XingRin]${NC} $1"; }
log_error() { echo -e "${RED}[XingRin]${NC} $1"; }
# 渐变色定义
CYAN='\033[0;36m'
MAGENTA='\033[0;35m'
BOLD='\033[1m'
DIM='\033[2m'

log_info() { echo -e "${CYAN} ▸${NC} $1"; }
log_success() { echo -e "${GREEN} ✔${NC} $1"; }
log_warn() { echo -e "${YELLOW} ⚠${NC} $1"; }
log_error() { echo -e "${RED} ✖${NC} $1"; }

# 炫酷 Banner
show_banner() {
echo -e ""
echo -e "${CYAN}${BOLD} ██╗ ██╗██╗███╗ ██╗ ██████╗ ██████╗ ██╗███╗ ██╗${NC}"
echo -e "${CYAN} ╚██╗██╔╝██║████╗ ██║██╔════╝ ██╔══██╗██║████╗ ██║${NC}"
echo -e "${BLUE}${BOLD} ╚███╔╝ ██║██╔██╗ ██║██║ ███╗██████╔╝██║██╔██╗ ██║${NC}"
echo -e "${BLUE} ██╔██╗ ██║██║╚██╗██║██║ ██║██╔══██╗██║██║╚██╗██║${NC}"
echo -e "${MAGENTA}${BOLD} ██╔╝ ██╗██║██║ ╚████║╚██████╔╝██║ ██║██║██║ ╚████║${NC}"
echo -e "${MAGENTA} ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝${NC}"
echo -e ""
echo -e "${DIM} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${BOLD} 🚀 分布式安全扫描平台 │ Worker 节点部署${NC}"
echo -e "${DIM} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e ""
}

# 完成 Banner
show_complete() {
echo -e ""
echo -e "${GREEN}${BOLD} ╔═══════════════════════════════════════════════════╗${NC}"
echo -e "${GREEN}${BOLD} ║ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ██████╗ ██████╗ ███╗ ██╗███████╗██╗ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ██╔══██╗██╔═══██╗████╗ ██║██╔════╝██║ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ██║ ██║██║ ██║██╔██╗ ██║█████╗ ██║ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ██║ ██║██║ ██║██║╚██╗██║██╔══╝ ╚═╝ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ██████╔╝╚██████╔╝██║ ╚████║███████╗██╗ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝╚═╝ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ║${NC}"
echo -e "${GREEN}${BOLD} ║ ✨ XingRin Worker 节点部署完成! ║${NC}"
echo -e "${GREEN}${BOLD} ║ ║${NC}"
echo -e "${GREEN}${BOLD} ╚═══════════════════════════════════════════════════╝${NC}"
echo -e ""
}

# 等待 apt 锁释放
wait_for_apt_lock() {

@@ -150,9 +190,7 @@ pull_image() {

# 主流程
main() {
log_info "=========================================="
log_info " XingRin 节点安装"
log_info "=========================================="
show_banner

detect_os
install_docker
@@ -162,9 +200,7 @@ main() {

touch "$DOCKER_MARKER"

log_success "=========================================="
log_success " ✓ 安装完成"
log_success "=========================================="
show_complete
}

main "$@"
@@ -8,7 +8,7 @@ services:
build:
context: ./postgres
dockerfile: Dockerfile
image: ${DOCKER_USER:-yyhuni}/xingrin-postgres:15
image: ${DOCKER_USER:-yyhuni}/xingrin-postgres:${IMAGE_TAG:-dev}
restart: always
environment:
POSTGRES_DB: ${DB_NAME}

@@ -14,7 +14,7 @@ services:
build:
context: ./postgres
dockerfile: Dockerfile
image: ${DOCKER_USER:-yyhuni}/xingrin-postgres:15
image: ${DOCKER_USER:-yyhuni}/xingrin-postgres:${IMAGE_TAG:?IMAGE_TAG is required}
restart: always
environment:
POSTGRES_DB: ${DB_NAME}

@@ -1,4 +1,4 @@
FROM python:3.10-slim
FROM python:3.10-slim-bookworm

WORKDIR /app

@@ -11,7 +11,16 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*

# 安装 Docker CLI(用于本地 Worker 任务分发)
RUN curl -fsSL https://get.docker.com | sh
# 只安装 docker-ce-cli,避免安装完整 Docker 引擎
RUN apt-get update && \
apt-get install -y ca-certificates gnupg && \
install -m 0755 -d /etc/apt/keyrings && \
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg && \
chmod a+r /etc/apt/keyrings/docker.gpg && \
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian bookworm stable" > /etc/apt/sources.list.d/docker.list && \
apt-get update && \
apt-get install -y docker-ce-cli && \
rm -rf /var/lib/apt/lists/*

# 安装 uv(超快的 Python 包管理器)
RUN pip install uv

@@ -15,10 +15,12 @@ NC='\033[0m'
# 解析参数
WITH_FRONTEND=true
DEV_MODE=false
QUIET_MODE=false
for arg in "$@"; do
case $arg in
--no-frontend) WITH_FRONTEND=false ;;
--dev) DEV_MODE=true ;;
--quiet) QUIET_MODE=true ;;
esac
done

@@ -155,6 +157,11 @@ echo -e "${GREEN}[OK]${NC} 服务已启动"
# 数据初始化
./scripts/init-data.sh

# 静默模式下不显示结果(由调用方显示)
if [ "$QUIET_MODE" = true ]; then
exit 0
fi

# 获取访问地址
PUBLIC_HOST=$(grep "^PUBLIC_HOST=" .env 2>/dev/null | cut -d= -f2)
if [ -n "$PUBLIC_HOST" ] && [ "$PUBLIC_HOST" != "server" ]; then
@@ -67,6 +67,45 @@ const DEFAULT_FIELDS: FilterField[] = [
PREDEFINED_FIELDS.host,
]

// History storage key
const FILTER_HISTORY_KEY = 'smart_filter_history'
const MAX_HISTORY_PER_FIELD = 10

// Get history values for a field
function getFieldHistory(field: string): string[] {
if (typeof window === 'undefined') return []
try {
const history = JSON.parse(localStorage.getItem(FILTER_HISTORY_KEY) || '{}')
return history[field] || []
} catch {
return []
}
}

// Save a value to field history
function saveFieldHistory(field: string, value: string) {
if (typeof window === 'undefined' || !value.trim()) return
try {
const history = JSON.parse(localStorage.getItem(FILTER_HISTORY_KEY) || '{}')
const fieldHistory = (history[field] || []).filter((v: string) => v !== value)
fieldHistory.unshift(value)
history[field] = fieldHistory.slice(0, MAX_HISTORY_PER_FIELD)
localStorage.setItem(FILTER_HISTORY_KEY, JSON.stringify(history))
} catch {
// ignore
}
}

// Extract field-value pairs from query and save to history
function saveQueryHistory(query: string) {
const regex = /(\w+)(==|!=|=)"([^"]+)"/g
let match
while ((match = regex.exec(query)) !== null) {
const [, field, , value] = match
saveFieldHistory(field, value)
}
}

// Parse filter expression (FOFA style)
interface ParsedFilter {
field: string

@@ -115,10 +154,114 @@ export function SmartFilterInput({
const [open, setOpen] = React.useState(false)
const [inputValue, setInputValue] = React.useState(value ?? "")
const inputRef = React.useRef<HTMLInputElement>(null)
const ghostRef = React.useRef<HTMLSpanElement>(null)
const listRef = React.useRef<HTMLDivElement>(null)
const savedScrollTop = React.useRef<number | null>(null)
const hasInitialized = React.useRef(false)

// Calculate ghost text suggestion
const ghostText = React.useMemo(() => {
if (!inputValue) return ""

// Get the last word/token being typed
const lastSpaceIndex = inputValue.lastIndexOf(' ')
const currentToken = lastSpaceIndex === -1 ? inputValue : inputValue.slice(lastSpaceIndex + 1)
const lowerToken = currentToken.toLowerCase()

// If empty token after space, check if previous expression is complete
if (!currentToken && inputValue.trim()) {
// Check if last expression is complete (ends with ")
if (inputValue.trimEnd().endsWith('"')) {
return '&& '
}
return ""
}

if (!currentToken) return ""

// Priority 1: Field name completion (no = in token)
if (!currentToken.includes('=') && !currentToken.includes('!')) {
// Find matching field first
const matchingField = fields.find(f =>
f.key.toLowerCase().startsWith(lowerToken) &&
f.key.toLowerCase() !== lowerToken
)
if (matchingField) {
return matchingField.key.slice(currentToken.length) + '="'
}

// If exact match of field name, suggest ="
const exactField = fields.find(f => f.key.toLowerCase() === lowerToken)
if (exactField) {
return '="'
}

// Priority 2: Logical operators (only if no field matches)
if ('&&'.startsWith(currentToken) && currentToken.startsWith('&')) {
return '&&'.slice(currentToken.length) + ' '
}
if ('||'.startsWith(currentToken) && currentToken.startsWith('|')) {
return '||'.slice(currentToken.length) + ' '
}
// 'and' / 'or' only if no field name starts with these
if (!matchingField) {
if ('and'.startsWith(lowerToken) && lowerToken.length > 0 && !fields.some(f => f.key.toLowerCase().startsWith(lowerToken))) {
return 'and'.slice(lowerToken.length) + ' '
}
if ('or'.startsWith(lowerToken) && lowerToken.length > 0 && !fields.some(f => f.key.toLowerCase().startsWith(lowerToken))) {
return 'or'.slice(lowerToken.length) + ' '
}
}

return ""
}

// Check if typing ! for != operator
if (currentToken.match(/^(\w+)!$/)) {
return '="'
}

// Check if typing = and might want ==
const singleEqMatch = currentToken.match(/^(\w+)=$/)
if (singleEqMatch) {
// Suggest " for fuzzy match (most common)
return '"'
}

// Check if typed == or != (no opening quote yet)
const doubleOpMatch = currentToken.match(/^(\w+)(==|!=)$/)
if (doubleOpMatch) {
return '"'
}

// Check if typing a value (has = and opening quote)
const eqMatch = currentToken.match(/^(\w+)(==|!=|=)"([^"]*)$/)
if (eqMatch) {
const [, field, , partialValue] = eqMatch
// Get history for this field
const history = getFieldHistory(field)
// Find matching history value
const matchingValue = history.find(v =>
v.toLowerCase().startsWith(partialValue.toLowerCase()) &&
v.toLowerCase() !== partialValue.toLowerCase()
)
if (matchingValue) {
return matchingValue.slice(partialValue.length) + '"'
}
// If value has content but no closing quote, suggest closing quote
if (partialValue.length > 0) {
return '"'
}
}

// Check if a complete expression just finished (ends with ")
if (currentToken.match(/^\w+(==|!=|=)"[^"]+"$/)) {
return ' && '
}

return ""
}, [inputValue, fields])

// Synchronize external value changes
React.useEffect(() => {
if (value !== undefined) {

@@ -189,12 +332,27 @@ export function SmartFilterInput({

// Handle search
const handleSearch = () => {
// Save query values to history
saveQueryHistory(inputValue)
onSearch?.(parsedFilters, inputValue)
setOpen(false)
}

// Accept ghost text suggestion
const acceptGhostText = () => {
if (ghostText) {
setInputValue(inputValue + ghostText)
return true
}
return false
}

// Handle keyboard events
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Tab" && ghostText) {
e.preventDefault()
acceptGhostText()
}
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault()
handleSearch()
@@ -202,6 +360,14 @@ export function SmartFilterInput({
if (e.key === "Escape") {
setOpen(false)
}
// Right arrow at end of input accepts ghost text
if (e.key === "ArrowRight" && ghostText) {
const input = inputRef.current
if (input && input.selectionStart === input.value.length) {
e.preventDefault()
acceptGhostText()
}
}
}

// Append example to input box (not overwrite), then close popover
@@ -215,36 +381,46 @@ export function SmartFilterInput({

return (
<div className={className}>
<Popover open={open} onOpenChange={setOpen} modal={false}>
<PopoverAnchor asChild>
<div className="flex items-center gap-2">
<Input
ref={inputRef}
type="text"
value={inputValue}
onChange={(e) => {
setInputValue(e.target.value)
if (!open) setOpen(true)
}}
onFocus={() => setOpen(true)}
onBlur={(e) => {
// If focus moves to inside Popover or input itself, don't close
const relatedTarget = e.relatedTarget as HTMLElement | null
if (relatedTarget?.closest('[data-radix-popper-content-wrapper]')) {
return
}
// Delay close to let CommandItem's onSelect execute first
setTimeout(() => setOpen(false), 150)
}}
onKeyDown={handleKeyDown}
placeholder={placeholder || defaultPlaceholder}
className="h-8 w-full"
/>
<Button variant="outline" size="sm" onClick={handleSearch}>
<IconSearch className="h-4 w-4" />
</Button>
</div>
</PopoverAnchor>
<div className="flex items-center gap-2">
<Popover open={open} onOpenChange={setOpen} modal={false}>
<PopoverAnchor asChild>
<div className="relative flex-1">
<Input
ref={inputRef}
type="text"
value={inputValue}
onChange={(e) => {
setInputValue(e.target.value)
if (!open) setOpen(true)
}}
onFocus={() => setOpen(true)}
onBlur={(e) => {
// If focus moves to inside Popover or input itself, don't close
const relatedTarget = e.relatedTarget as HTMLElement | null
if (relatedTarget?.closest('[data-radix-popper-content-wrapper]')) {
return
}
// Delay close to let CommandItem's onSelect execute first
setTimeout(() => setOpen(false), 150)
}}
onKeyDown={handleKeyDown}
placeholder={placeholder || defaultPlaceholder}
className="h-8 w-full font-mono text-sm"
/>
{/* Ghost text overlay */}
{ghostText && (
<div
className="absolute inset-0 flex items-center pointer-events-none overflow-hidden px-3"
aria-hidden="true"
>
<span className="font-mono text-sm">
<span className="invisible">{inputValue}</span>
<span ref={ghostRef} className="text-muted-foreground/40">{ghostText}</span>
</span>
</div>
)}
</div>
</PopoverAnchor>
<PopoverContent
className="w-[var(--radix-popover-trigger-width)] p-0"
align="start"
@@ -343,6 +519,10 @@ export function SmartFilterInput({
</Command>
</PopoverContent>
</Popover>
<Button variant="outline" size="sm" onClick={handleSearch}>
<IconSearch className="h-4 w-4" />
</Button>
</div>
</div>
)
}
@@ -1,3 +1,4 @@
export { SearchPage } from "./search-page"
export { SearchResultCard } from "./search-result-card"
export { SearchPagination } from "./search-pagination"
export { SearchResultsTable } from "./search-results-table"

@@ -1,56 +1,141 @@
"use client"

import { useState, useCallback } from "react"
import { useState, useCallback, useMemo, useEffect } from "react"
import { useSearchParams } from "next/navigation"
import { motion, AnimatePresence } from "framer-motion"
import { Search, AlertCircle } from "lucide-react"
import { Search, AlertCircle, History, X, Download } from "lucide-react"
import { useTranslations } from "next-intl"
import { toast } from "sonner"
import { SmartFilterInput, type FilterField } from "@/components/common/smart-filter-input"
import { SearchResultCard } from "./search-result-card"
import { SearchPagination } from "./search-pagination"
import { useAssetSearch } from "@/hooks/use-search"
import { VulnerabilityDetailDialog } from "@/components/vulnerabilities/vulnerability-detail-dialog"
import { VulnerabilityService } from "@/services/vulnerability.service"
import type { SearchParams, SearchState, Vulnerability as SearchVuln } from "@/types/search.types"
import { SearchService } from "@/services/search.service"
import type { SearchParams, SearchState, Vulnerability as SearchVuln, AssetType } from "@/types/search.types"
import type { Vulnerability } from "@/types/vulnerability.types"
import { Alert, AlertDescription } from "@/components/ui/alert"
import { Button } from "@/components/ui/button"
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"
import { SearchResultsTable } from "./search-results-table"
import { SearchResultCard } from "./search-result-card"
import { Badge } from "@/components/ui/badge"
import { cn } from "@/lib/utils"

// 搜索示例 - 展示各种查询语法
const SEARCH_FILTER_EXAMPLES = [
// 模糊匹配 (=)
// Website 搜索示例
const WEBSITE_SEARCH_EXAMPLES = [
'host="api"',
'title="Dashboard"',
'tech="nginx"',
// 精确匹配 (==)
'status=="200"',
'host=="admin.example.com"',
// 不等于 (!=)
'status!="404"',
'host!="test"',
// AND 组合 (&&)
'host="api" && status=="200"',
'tech="nginx" && title="Dashboard"',
'host="admin" && tech="php" && status=="200"',
// OR 组合 (||)
'tech="vue" || tech="react"',
'status=="200" || status=="301"',
'host="admin" || host="manage"',
// 混合查询
'host="api" && (tech="nginx" || tech="apache")',
'(status=="200" || status=="301") && tech="vue"',
'host="example" && status!="404" && tech="nginx"',
'host="admin" && tech="php" && status=="200"',
'status!="404"',
]

// Endpoint 搜索示例
const ENDPOINT_SEARCH_EXAMPLES = [
'host="api"',
'url="/api/v1"',
'title="Dashboard"',
'tech="nginx"',
'status=="200"',
'host="api" && status=="200"',
'url="/admin" && status=="200"',
'tech="vue" || tech="react"',
]

// 快捷搜索标签
const QUICK_SEARCH_TAGS = [
{ label: 'status=="200"', query: 'status=="200"' },
{ label: 'tech="nginx"', query: 'tech="nginx"' },
{ label: 'tech="php"', query: 'tech="php"' },
{ label: 'tech="vue"', query: 'tech="vue"' },
{ label: 'tech="react"', query: 'tech="react"' },
{ label: 'status=="403"', query: 'status=="403"' },
]

// 最近搜索本地存储 key
const RECENT_SEARCHES_KEY = 'xingrin_recent_searches'
const MAX_RECENT_SEARCHES = 5

// 获取最近搜索记录
function getRecentSearches(): string[] {
if (typeof window === 'undefined') return []
try {
const saved = localStorage.getItem(RECENT_SEARCHES_KEY)
return saved ? JSON.parse(saved) : []
} catch {
return []
}
}

// 保存搜索记录
function saveRecentSearch(query: string) {
if (typeof window === 'undefined' || !query.trim()) return
try {
const searches = getRecentSearches().filter(s => s !== query)
searches.unshift(query)
localStorage.setItem(
RECENT_SEARCHES_KEY,
JSON.stringify(searches.slice(0, MAX_RECENT_SEARCHES))
)
} catch {
// ignore
}
}

// 删除搜索记录
function removeRecentSearch(query: string) {
if (typeof window === 'undefined') return
try {
const searches = getRecentSearches().filter(s => s !== query)
localStorage.setItem(RECENT_SEARCHES_KEY, JSON.stringify(searches))
} catch {
// ignore
}
}

export function SearchPage() {
const t = useTranslations('search')
const urlSearchParams = useSearchParams()
const [searchState, setSearchState] = useState<SearchState>("initial")
const [query, setQuery] = useState("")
const [assetType, setAssetType] = useState<AssetType>("website")
const [searchParams, setSearchParams] = useState<SearchParams>({})
const [page, setPage] = useState(1)
const [pageSize, setPageSize] = useState(10)
const [selectedVuln, setSelectedVuln] = useState<Vulnerability | null>(null)
const [vulnDialogOpen, setVulnDialogOpen] = useState(false)
const [loadingVuln, setLoadingVuln] = useState(false)
const [, setLoadingVuln] = useState(false)
const [recentSearches, setRecentSearches] = useState<string[]>([])
const [initialQueryProcessed, setInitialQueryProcessed] = useState(false)

// 加载最近搜索记录
useEffect(() => {
setRecentSearches(getRecentSearches())
}, [])

// 处理 URL 参数中的搜索查询
useEffect(() => {
if (initialQueryProcessed) return

const q = urlSearchParams.get('q')
if (q) {
setQuery(q)
setSearchParams({ q, asset_type: assetType })
setSearchState("searching")
saveRecentSearch(q)
setRecentSearches(getRecentSearches())
}
setInitialQueryProcessed(true)
}, [urlSearchParams, assetType, initialQueryProcessed])

// 根据资产类型选择搜索示例
const searchExamples = useMemo(() => {
return assetType === 'endpoint' ? ENDPOINT_SEARCH_EXAMPLES : WEBSITE_SEARCH_EXAMPLES
}, [assetType])

// 搜索过滤字段配置
const SEARCH_FILTER_FIELDS: FilterField[] = [

@@ -73,16 +158,71 @@ export function SearchPage() {
if (!rawQuery.trim()) return

setQuery(rawQuery)
setSearchParams({ q: rawQuery })
setSearchParams({ q: rawQuery, asset_type: assetType })
setPage(1)
setSearchState("searching")

// 保存到最近搜索
saveRecentSearch(rawQuery)
setRecentSearches(getRecentSearches())
}, [assetType])

// 处理快捷标签点击
const handleQuickTagClick = useCallback((tagQuery: string) => {
setQuery(tagQuery)
}, [])

// 处理最近搜索点击
const handleRecentSearchClick = useCallback((recentQuery: string) => {
setQuery(recentQuery)
setSearchParams({ q: recentQuery, asset_type: assetType })
setPage(1)
setSearchState("searching")
saveRecentSearch(recentQuery)
setRecentSearches(getRecentSearches())
}, [assetType])

// 删除最近搜索
const handleRemoveRecentSearch = useCallback((e: React.MouseEvent, searchQuery: string) => {
e.stopPropagation()
removeRecentSearch(searchQuery)
setRecentSearches(getRecentSearches())
}, [])

// 导出状态
const [isExporting, setIsExporting] = useState(false)

// 导出 CSV(调用后端 API 导出全部结果)
const handleExportCSV = useCallback(async () => {
if (!searchParams.q) return

setIsExporting(true)
try {
await SearchService.exportCSV(searchParams.q, assetType)
toast.success(t('exportSuccess'))
} catch (error) {
console.error('Export failed:', error)
toast.error(t('exportFailed'))
} finally {
setIsExporting(false)
}
}, [searchParams.q, assetType, t])

// 当数据加载完成时更新状态
if (searchState === "searching" && data && !isLoading) {
setSearchState("results")
}

const handleAssetTypeChange = useCallback((value: AssetType) => {
setAssetType(value)
// 清空搜索结果
if (searchState === "results") {
setSearchState("initial")
setSearchParams({})
setQuery("")
}
}, [searchState])

const handlePageChange = useCallback((newPage: number) => {
setPage(newPage)
}, [])
@@ -107,6 +247,19 @@ export function SearchPage() {
}
}, [t])

// 资产类型选择器组件
const AssetTypeSelector = (
<Select value={assetType} onValueChange={handleAssetTypeChange}>
<SelectTrigger size="sm" className="w-[100px]">
<SelectValue />
</SelectTrigger>
<SelectContent>
<SelectItem value="website">{t('assetTypes.website')}</SelectItem>
<SelectItem value="endpoint">{t('assetTypes.endpoint')}</SelectItem>
</SelectContent>
</Select>
)

return (
<div className="flex-1 w-full flex flex-col">
<AnimatePresence mode="wait">
@@ -117,26 +270,90 @@ export function SearchPage() {
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: -50 }}
transition={{ duration: 0.3 }}
className="flex-1 flex flex-col items-center justify-center px-4 -mt-50"
className="flex-1 flex flex-col items-center justify-center px-4 relative overflow-hidden"
>
<div className="flex flex-col items-center gap-6 w-full max-w-2xl">
<h1 className="text-3xl font-semibold text-foreground flex items-center gap-3">
<Search className="h-8 w-8" />
{t('title')}
</h1>
{/* 背景装饰 */}
<div className="absolute inset-0 -z-10 overflow-hidden pointer-events-none">
<div className="absolute left-1/2 top-1/4 -translate-x-1/2 h-[400px] w-[600px] rounded-full bg-primary/5 blur-3xl" />
<div className="absolute right-1/4 top-1/2 h-[200px] w-[300px] rounded-full bg-primary/3 blur-2xl" />
</div>

<SmartFilterInput
fields={SEARCH_FILTER_FIELDS}
examples={SEARCH_FILTER_EXAMPLES}
placeholder='host="api" && tech="nginx" && status=="200"'
value={query}
onSearch={handleSearch}
className="w-full [&_input]:h-12 [&_input]:text-base [&_button]:h-12 [&_button]:w-12 [&_button]:p-0"
/>
<div className="flex flex-col items-center gap-6 w-full max-w-3xl -mt-16">
{/* 标题 */}
<div className="flex flex-col items-center gap-2">
<div className="flex items-center justify-center w-16 h-16 rounded-2xl bg-primary/10 mb-2">
<Search className="h-8 w-8 text-primary" />
</div>
<h1 className="text-3xl font-semibold text-foreground">
{t('title')}
</h1>
<p className="text-sm text-muted-foreground">
{t('hint')}
</p>
</div>

<p className="text-sm text-muted-foreground">
{t('hint')}
</p>
{/* 搜索框 */}
<div className="flex items-center gap-3 w-full">
{AssetTypeSelector}
<SmartFilterInput
fields={SEARCH_FILTER_FIELDS}
examples={searchExamples}
placeholder='host="api" && tech="nginx" && status=="200"'
value={query}
onSearch={handleSearch}
className="flex-1"
/>
</div>

{/* 快捷搜索标签 */}
<div className="flex flex-wrap justify-center gap-2">
{QUICK_SEARCH_TAGS.map((tag) => (
<Badge
key={tag.query}
variant="outline"
className="cursor-pointer hover:bg-accent transition-colors px-3 py-1"
onClick={() => handleQuickTagClick(tag.query)}
>
{tag.label}
</Badge>
))}
</div>

{/* 最近搜索 */}
{recentSearches.length > 0 && (
<motion.div
initial={{ opacity: 0 }}
animate={{ opacity: 1 }}
transition={{ delay: 0.3 }}
className="w-full max-w-xl mt-2"
>
<div className="flex items-center gap-2 text-xs text-muted-foreground mb-2">
<History className="h-3.5 w-3.5" />
<span>{t('recentSearches')}</span>
</div>
<div className="flex flex-wrap gap-2">
{recentSearches.map((search) => (
<Badge
key={search}
variant="secondary"
className={cn(
"cursor-pointer hover:bg-secondary/80 transition-colors",
"pl-3 pr-1.5 py-1 gap-1 group"
)}
onClick={() => handleRecentSearchClick(search)}
>
<span className="font-mono text-xs truncate max-w-[200px]">{search}</span>
<button
onClick={(e) => handleRemoveRecentSearch(e, search)}
className="ml-1 p-0.5 rounded hover:bg-muted-foreground/20 opacity-0 group-hover:opacity-100 transition-opacity"
>
<X className="h-3 w-3" />
</button>
</Badge>
))}
</div>
</motion.div>
)}
</div>
</motion.div>
)}
@@ -172,10 +389,11 @@ export function SearchPage() {
transition={{ duration: 0.3, delay: 0.1 }}
className="sticky top-0 z-10 bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60 border-b px-4 py-3"
>
<div className="flex items-center gap-3 max-w-4xl mx-auto">
<div className="flex items-center gap-3">
{AssetTypeSelector}
<SmartFilterInput
fields={SEARCH_FILTER_FIELDS}
examples={SEARCH_FILTER_EXAMPLES}
examples={searchExamples}
placeholder='host="api" && tech="nginx" && status=="200"'
value={query}
onSearch={handleSearch}
@@ -184,12 +402,21 @@ export function SearchPage() {
<span className="text-sm text-muted-foreground whitespace-nowrap">
{isFetching ? t('loading') : t('resultsCount', { count: data?.total ?? 0 })}
</span>
<Button
variant="outline"
size="sm"
onClick={handleExportCSV}
disabled={!data?.results || data.results.length === 0 || isExporting}
>
<Download className="h-4 w-4 mr-1.5" />
{isExporting ? t('exporting') : t('export')}
</Button>
</div>
</motion.div>

{/* 错误提示 */}
{error && (
<div className="p-4 max-w-4xl mx-auto w-full">
<div className="p-4 w-full">
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>
@@ -212,39 +439,41 @@ export function SearchPage() {
</div>
)}

{/* 搜索结果列表 */}
{/* 搜索结果 */}
{!error && data && data.results.length > 0 && (
<>
<div className="flex-1 overflow-auto p-4">
<div className="max-w-4xl mx-auto space-y-4">
{data.results.map((result, index) => (
<motion.div
key={`${result.url}-${index}`}
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.3, delay: index * 0.05 }}
>
<SearchResultCard
result={result}
{assetType === 'website' ? (
// Website 使用卡片样式
<div className="space-y-4 max-w-4xl mx-auto">
{data.results.map((result) => (
<SearchResultCard
key={result.id}
result={result}
onViewVulnerability={handleViewVulnerability}
/>
</motion.div>
))}
</div>
))}
</div>
) : (
// Endpoint 使用表格样式
<SearchResultsTable
results={data.results}
assetType={assetType}
onViewVulnerability={handleViewVulnerability}
/>
)}
</div>

{/* 分页控制 */}
<div className="border-t px-4 py-3">
<div className="max-w-4xl mx-auto">
<SearchPagination
page={page}
pageSize={pageSize}
total={data.total}
totalPages={data.totalPages}
onPageChange={handlePageChange}
onPageSizeChange={handlePageSizeChange}
/>
</div>
<SearchPagination
page={page}
pageSize={pageSize}
total={data.total}
totalPages={data.totalPages}
onPageChange={handlePageChange}
onPageSizeChange={handlePageSizeChange}
/>
</div>
</>
)}
@@ -25,7 +25,12 @@ import {
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip"
import type { SearchResult, Vulnerability } from "@/types/search.types"
import type { SearchResult, Vulnerability, WebsiteSearchResult } from "@/types/search.types"

// 类型守卫:检查是否为 WebsiteSearchResult
function isWebsiteResult(result: SearchResult): result is WebsiteSearchResult {
return 'vulnerabilities' in result
}

interface SearchResultCardProps {
result: SearchResult
@@ -41,6 +46,15 @@ const severityColors: Record<string, string> = {
info: "bg-[#848d97]/10 text-[#848d97] border border-[#848d97]/20",
}

// 状态码 Badge variant
function getStatusVariant(status: number | null): "default" | "secondary" | "destructive" | "outline" {
if (!status) return "outline"
if (status >= 200 && status < 300) return "default"
if (status >= 300 && status < 400) return "secondary"
if (status >= 400) return "destructive"
return "outline"
}

export function SearchResultCard({ result, onViewVulnerability }: SearchResultCardProps) {
const t = useTranslations('search.card')
const [vulnOpen, setVulnOpen] = useState(false)
@@ -54,8 +68,16 @@ export function SearchResultCard({ result, onViewVulnerability }: SearchResultCa
.join("\n")
}

// 格式化字节数
const formatBytes = (bytes: number | null) => {
if (bytes === null || bytes === undefined) return null
if (bytes < 1024) return `${bytes} B`
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`
}

// 检测内容是否溢出
const maxHeight = 26 * 4 // 4行高度 (badge ~22px + gap 4px)
const maxHeight = 26 * 4

useEffect(() => {
const el = containerRef.current
@@ -82,27 +104,51 @@ export function SearchResultCard({ result, onViewVulnerability }: SearchResultCa
return (
<Card className="overflow-hidden py-0 gap-0">
<CardContent className="p-0">
{/* 顶部 URL 栏 */}
<h3 className="font-semibold text-sm px-4 py-2 bg-muted/30 border-b break-all">
{result.url || result.host}
</h3>
{/* 顶部 URL + Badge 行 */}
<div className="px-4 py-2 bg-muted/30 border-b space-y-2">
<h3 className="font-mono text-sm break-all">
{result.url || result.host}
</h3>
{/* Badge 行 */}
<div className="flex flex-wrap items-center gap-2">
<Badge variant={getStatusVariant(result.statusCode)} className="font-mono text-xs">
{result.statusCode ?? '-'}
</Badge>
{result.webserver && (
<Badge variant="outline" className="font-mono text-xs">
{result.webserver}
</Badge>
)}
{result.contentType && (
<Badge variant="outline" className="font-mono text-xs">
{result.contentType.split(';')[0]}
</Badge>
)}
{formatBytes(result.contentLength) && (
<Badge variant="outline" className="font-mono text-xs">
{formatBytes(result.contentLength)}
</Badge>
)}
</div>
</div>

{/* 中间左右分栏 */}
<div className="flex flex-col md:flex-row">
{/* 左侧信息区 */}
<div className="w-full md:w-2/5 px-4 pt-2 pb-3 border-b md:border-b-0 md:border-r flex flex-col">
<div className="w-full md:w-[320px] md:shrink-0 px-4 py-3 border-b md:border-b-0 md:border-r flex flex-col">
<div className="space-y-1.5 text-sm">
<div className="flex items-center h-[28px]">
<span className="text-muted-foreground w-12 shrink-0">{t('title')}</span>
<span className="font-medium truncate" title={result.title}>{result.title || '-'}</span>
<div className="flex items-baseline">
<span className="text-muted-foreground w-12 shrink-0">Title</span>
<span className="truncate" title={result.title}>{result.title || '-'}</span>
</div>
<div className="flex items-center">
<div className="flex items-baseline">
<span className="text-muted-foreground w-12 shrink-0">Host</span>
<span className="font-mono text-sm truncate" title={result.host}>{result.host || '-'}</span>
<span className="font-mono truncate" title={result.host}>{result.host || '-'}</span>
</div>
</div>

{/* Technologies 直接显示 */}
{/* Technologies */}
{result.technologies && result.technologies.length > 0 && (
<div className="mt-3 flex flex-col gap-1">
<div
@@ -143,7 +189,7 @@ export function SearchResultCard({ result, onViewVulnerability }: SearchResultCa
</div>

{/* 右侧 Tab 区 */}
<div className="w-full md:w-3/5 flex flex-col">
<div className="w-full md:flex-1 flex flex-col">
<Tabs defaultValue="header" className="w-full h-full flex flex-col gap-0">
<TabsList className="h-[28px] gap-4 rounded-none border-b bg-transparent px-4 pt-1">
<TabsTrigger
@@ -158,6 +204,14 @@ export function SearchResultCard({ result, onViewVulnerability }: SearchResultCa
>
Body
</TabsTrigger>
{result.location && (
<TabsTrigger
value="location"
className="h-full rounded-none border-b-2 border-transparent border-x-0 border-t-0 bg-transparent px-1 text-sm shadow-none focus-visible:ring-0 focus-visible:outline-none data-[state=active]:border-b-primary data-[state=active]:bg-transparent data-[state=active]:shadow-none"
>
Location
</TabsTrigger>
)}
</TabsList>
<TabsContent value="header" className="flex-1 overflow-auto bg-muted/30 px-4 py-2 max-h-[200px]">
<pre className="text-xs font-mono whitespace-pre-wrap">
@@ -169,12 +223,19 @@ export function SearchResultCard({ result, onViewVulnerability }: SearchResultCa
{result.responseBody || '-'}
</pre>
</TabsContent>
{result.location && (
<TabsContent value="location" className="flex-1 overflow-auto bg-muted/30 px-4 py-2 max-h-[200px]">
<pre className="text-xs font-mono whitespace-pre-wrap">
{result.location}
</pre>
</TabsContent>
)}
</Tabs>
</div>
</div>

{/* 底部漏洞区 */}
{result.vulnerabilities && result.vulnerabilities.length > 0 && (
{/* 底部漏洞区 - 仅 Website 类型显示 */}
{isWebsiteResult(result) && result.vulnerabilities && result.vulnerabilities.length > 0 && (
<div className="border-t">
<Collapsible open={vulnOpen} onOpenChange={setVulnOpen}>
<CollapsibleTrigger className="flex items-center gap-1 px-4 py-2 text-sm text-muted-foreground hover:text-foreground transition-colors w-full">
frontend/components/search/search-results-table.tsx (new file, 285 lines)
@@ -0,0 +1,285 @@
"use client"

import { useMemo } from "react"
import { useFormatter } from "next-intl"
import type { ColumnDef } from "@tanstack/react-table"
import { Badge } from "@/components/ui/badge"
import { DataTableColumnHeader, UnifiedDataTable } from "@/components/ui/data-table"
import { ExpandableCell, ExpandableTagList } from "@/components/ui/data-table/expandable-cell"
import type { SearchResult, AssetType, Vulnerability, EndpointSearchResult } from "@/types/search.types"

interface SearchResultsTableProps {
  results: SearchResult[]
  assetType: AssetType
  onViewVulnerability?: (vuln: Vulnerability) => void
}

export function SearchResultsTable({ results, assetType }: SearchResultsTableProps) {
  const format = useFormatter()

  const formatDate = (dateString: string) => {
    return format.dateTime(new Date(dateString), {
      year: 'numeric',
      month: '2-digit',
      day: '2-digit',
      hour: '2-digit',
      minute: '2-digit',
    })
  }

  // Base column definitions (shared by Website and Endpoint)
  const baseColumns: ColumnDef<SearchResult, unknown>[] = useMemo(() => [
    {
      id: "url",
      accessorKey: "url",
      meta: { title: "URL" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="URL" />
      ),
      size: 350,
      minSize: 200,
      maxSize: 600,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("url")} />
      ),
    },
    {
      id: "host",
      accessorKey: "host",
      meta: { title: "Host" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Host" />
      ),
      size: 180,
      minSize: 100,
      maxSize: 250,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("host")} />
      ),
    },
    {
      id: "title",
      accessorKey: "title",
      meta: { title: "Title" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Title" />
      ),
      size: 150,
      minSize: 100,
      maxSize: 300,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("title")} />
      ),
    },
    {
      id: "statusCode",
      accessorKey: "statusCode",
      meta: { title: "Status" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Status" />
      ),
      size: 80,
      minSize: 60,
      maxSize: 100,
      cell: ({ row }) => {
        const statusCode = row.getValue("statusCode") as number | null
        if (!statusCode) return <span className="text-muted-foreground">-</span>

        let variant: "default" | "secondary" | "destructive" | "outline" = "outline"
        if (statusCode >= 200 && statusCode < 300) {
          variant = "outline"
        } else if (statusCode >= 300 && statusCode < 400) {
          variant = "secondary"
        } else if (statusCode >= 400 && statusCode < 500) {
          variant = "default"
        } else if (statusCode >= 500) {
          variant = "destructive"
        }

        return <Badge variant={variant} className="font-mono">{statusCode}</Badge>
      },
    },
    {
      id: "technologies",
      accessorKey: "technologies",
      meta: { title: "Tech" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Tech" />
      ),
      size: 180,
      minSize: 120,
      cell: ({ row }) => {
        const tech = row.getValue("technologies") as string[] | null
        if (!tech || tech.length === 0) return <span className="text-muted-foreground">-</span>
        return <ExpandableTagList items={tech} maxLines={2} variant="outline" />
      },
    },
    {
      id: "contentLength",
      accessorKey: "contentLength",
      meta: { title: "Length" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Length" />
      ),
      size: 100,
      minSize: 80,
      maxSize: 150,
      cell: ({ row }) => {
        const len = row.getValue("contentLength") as number | null
        if (len === null || len === undefined) return <span className="text-muted-foreground">-</span>
        return <span className="font-mono tabular-nums">{new Intl.NumberFormat().format(len)}</span>
      },
    },
    {
      id: "location",
      accessorKey: "location",
      meta: { title: "Location" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Location" />
      ),
      size: 150,
      minSize: 100,
      maxSize: 300,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("location")} />
      ),
    },
    {
      id: "webserver",
      accessorKey: "webserver",
      meta: { title: "Server" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Server" />
      ),
      size: 120,
      minSize: 80,
      maxSize: 200,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("webserver")} />
      ),
    },
    {
      id: "contentType",
      accessorKey: "contentType",
      meta: { title: "Type" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Type" />
      ),
      size: 120,
      minSize: 80,
      maxSize: 200,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("contentType")} />
      ),
    },
    {
      id: "responseBody",
      accessorKey: "responseBody",
      meta: { title: "Body" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Body" />
      ),
      size: 300,
      minSize: 200,
      cell: ({ row }) => (
        <ExpandableCell value={row.getValue("responseBody")} maxLines={3} />
      ),
    },
    {
      id: "responseHeaders",
      accessorKey: "responseHeaders",
      meta: { title: "Headers" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Headers" />
      ),
      size: 250,
      minSize: 150,
      maxSize: 400,
      cell: ({ row }) => {
        const headers = row.getValue("responseHeaders") as Record<string, string> | null
        if (!headers || Object.keys(headers).length === 0) {
          return <span className="text-muted-foreground">-</span>
        }
        const headersStr = Object.entries(headers)
          .map(([k, v]) => `${k}: ${v}`)
          .join('\n')
        return <ExpandableCell value={headersStr} maxLines={3} />
      },
    },
    {
      id: "vhost",
      accessorKey: "vhost",
      meta: { title: "VHost" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="VHost" />
      ),
      size: 80,
      minSize: 60,
      maxSize: 100,
      cell: ({ row }) => {
        const vhost = row.getValue("vhost") as boolean | null
        if (vhost === null || vhost === undefined) return <span className="text-muted-foreground">-</span>
        return <span className="font-mono text-sm">{vhost ? "true" : "false"}</span>
      },
    },
    {
      id: "createdAt",
      accessorKey: "createdAt",
      meta: { title: "Created" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="Created" />
      ),
      size: 150,
      minSize: 120,
      maxSize: 200,
      cell: ({ row }) => {
        const createdAt = row.getValue("createdAt") as string | null
        if (!createdAt) return <span className="text-muted-foreground">-</span>
        return <span className="text-sm">{formatDate(createdAt)}</span>
      },
    },
  ], [formatDate])

  // Endpoint-specific columns
  const endpointColumns: ColumnDef<SearchResult, unknown>[] = useMemo(() => [
    {
      id: "matchedGfPatterns",
      accessorKey: "matchedGfPatterns",
      meta: { title: "GF Patterns" },
      header: ({ column }) => (
        <DataTableColumnHeader column={column} title="GF Patterns" />
      ),
      size: 150,
      minSize: 100,
      maxSize: 250,
      cell: ({ row }) => {
        const patterns = (row.original as EndpointSearchResult).matchedGfPatterns
        if (!patterns || patterns.length === 0) return <span className="text-muted-foreground">-</span>
        return <ExpandableTagList items={patterns} maxLines={2} variant="secondary" />
      },
    },
  ], [])

  // Combine columns according to asset type
  const columns = useMemo(() => {
    if (assetType === 'endpoint') {
      // Insert the GF patterns column right after technologies
      const techIndex = baseColumns.findIndex(col => col.id === 'technologies')
      const cols = [...baseColumns]
      cols.splice(techIndex + 1, 0, ...endpointColumns)
      return cols
    }
    return baseColumns
  }, [assetType, baseColumns, endpointColumns])

  return (
    <UnifiedDataTable
      columns={columns}
      data={results}
      getRowId={(row) => String(row.id)}
      hideToolbar
      hidePagination
      enableRowSelection={false}
    />
  )
}
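The component above only needs the result rows and the active asset type; column composition is handled internally. A minimal usage sketch follows (the calling page, state handling, and data fetching are not part of this diff, so the names here are assumptions):

```tsx
// Hypothetical caller rendering the table for endpoint results.
// `results` would normally come from the search API; shown empty here.
import { SearchResultsTable } from "@/components/search/search-results-table"
import type { EndpointSearchResult } from "@/types/search.types"

const results: EndpointSearchResult[] = [] // e.g. rows returned by the search service

export function EndpointResults() {
  return <SearchResultsTable results={results} assetType="endpoint" />
}
```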
@@ -325,6 +325,18 @@
"noResults": "No matching assets found",
"noResultsHint": "Try adjusting your search criteria",
"vulnLoadError": "Failed to load vulnerability details",
"recentSearches": "Recent Searches",
"export": "Export",
"exporting": "Exporting...",
"exportSuccess": "Export successful",
"exportFailed": "Export failed",
"stats": {
"vulnerabilities": "Vulnerabilities"
},
"assetTypes": {
"website": "Website",
"endpoint": "Endpoint"
},
"fields": {
"host": "Hostname",
"url": "URL address",
@@ -334,6 +346,22 @@
"body": "Response body content",
"header": "Response header content"
},
"table": {
"url": "URL",
"host": "Host",
"title": "Title",
"status": "Status",
"technologies": "Technologies",
"contentLength": "Content Length",
"location": "Location",
"webserver": "Web Server",
"contentType": "Content Type",
"responseBody": "Response Body",
"responseHeaders": "Response Headers",
"vhost": "VHost",
"createdAt": "Created At",
"gfPatterns": "GF Patterns"
},
"card": {
"title": "Title",
"expand": "Expand",
@@ -1947,6 +1975,16 @@
"formatInvalid": "Invalid format"
}
},
"globalSearch": {
"search": "Search",
"placeholder": "Search assets... (host=\"api\" && tech=\"nginx\")",
"noResults": "No results found",
"searchFor": "Search for",
"recent": "Recent Searches",
"quickSearch": "Quick Search",
"hint": "Supports FOFA-style syntax",
"toSearch": "to search"
},
"errors": {
"unknown": "Operation failed, please try again later",
"validation": "Invalid input data",
@@ -325,6 +325,18 @@
"noResults": "未找到匹配的资产",
"noResultsHint": "请尝试调整搜索条件",
"vulnLoadError": "加载漏洞详情失败",
"recentSearches": "最近搜索",
"export": "导出",
"exporting": "导出中...",
"exportSuccess": "导出成功",
"exportFailed": "导出失败",
"stats": {
"vulnerabilities": "漏洞"
},
"assetTypes": {
"website": "网站",
"endpoint": "URL"
},
"fields": {
"host": "主机名",
"url": "URL 地址",
@@ -334,6 +346,22 @@
"body": "响应体内容",
"header": "响应头内容"
},
"table": {
"url": "URL",
"host": "主机名",
"title": "标题",
"status": "状态码",
"technologies": "技术栈",
"contentLength": "内容长度",
"location": "跳转地址",
"webserver": "Web 服务器",
"contentType": "内容类型",
"responseBody": "响应体",
"responseHeaders": "响应头",
"vhost": "VHost",
"createdAt": "创建时间",
"gfPatterns": "GF 模式"
},
"card": {
"title": "标题",
"expand": "展开",
@@ -1947,6 +1975,16 @@
"formatInvalid": "格式无效"
}
},
"globalSearch": {
"search": "搜索",
"placeholder": "搜索资产... (host=\"api\" && tech=\"nginx\")",
"noResults": "未找到结果",
"searchFor": "搜索",
"recent": "最近搜索",
"quickSearch": "快捷搜索",
"hint": "支持 FOFA 风格语法",
"toSearch": "搜索"
},
"errors": {
"unknown": "操作失败,请稍后重试",
"validation": "输入数据无效",
frontend/mock/data/auth.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import type { User, MeResponse, LoginResponse, LogoutResponse } from '@/types/auth.types'

export const mockUser: User = {
  id: 1,
  username: 'admin',
  isStaff: true,
  isSuperuser: true,
}

export const mockMeResponse: MeResponse = {
  authenticated: true,
  user: mockUser,
}

export const mockLoginResponse: LoginResponse = {
  message: 'Login successful',
  user: mockUser,
}

export const mockLogoutResponse: LogoutResponse = {
  message: 'Logout successful',
}
frontend/mock/data/engines.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import type { ScanEngine } from '@/types/engine.types'

export const mockEngines: ScanEngine[] = [
  {
    id: 1,
    name: 'Full Scan',
    configuration: `# Full reconnaissance scan
stages:
  - name: subdomain_discovery
    tools:
      - subfinder
      - amass
  - name: port_scan
    tools:
      - nmap
  - name: web_crawling
    tools:
      - httpx
      - katana
  - name: vulnerability_scan
    tools:
      - nuclei
`,
    createdAt: '2024-01-15T08:00:00Z',
    updatedAt: '2024-12-20T10:30:00Z',
  },
  {
    id: 2,
    name: 'Quick Scan',
    configuration: `# Quick scan - subdomain and web only
stages:
  - name: subdomain_discovery
    tools:
      - subfinder
  - name: web_crawling
    tools:
      - httpx
`,
    createdAt: '2024-02-10T09:00:00Z',
    updatedAt: '2024-12-18T14:00:00Z',
  },
  {
    id: 3,
    name: 'Vulnerability Only',
    configuration: `# Vulnerability scan only
stages:
  - name: vulnerability_scan
    tools:
      - nuclei
    options:
      severity: critical,high,medium
`,
    createdAt: '2024-03-05T11:00:00Z',
    updatedAt: '2024-12-15T16:20:00Z',
  },
  {
    id: 4,
    name: 'Subdomain Discovery',
    configuration: `# Subdomain enumeration only
stages:
  - name: subdomain_discovery
    tools:
      - subfinder
      - amass
      - findomain
`,
    createdAt: '2024-04-12T08:30:00Z',
    updatedAt: '2024-12-10T09:00:00Z',
  },
]

export function getMockEngines(): ScanEngine[] {
  return mockEngines
}

export function getMockEngineById(id: number): ScanEngine | undefined {
  return mockEngines.find(e => e.id === id)
}
frontend/mock/data/notifications.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
import type { BackendNotification, GetNotificationsResponse } from '@/types/notification.types'

export const mockNotifications: BackendNotification[] = [
  {
    id: 1,
    category: 'vulnerability',
    title: 'Critical Vulnerability Found',
    message: 'SQL Injection detected in retailmax.com/product endpoint',
    level: 'critical',
    createdAt: '2024-12-29T10:30:00Z',
    isRead: false,
  },
  {
    id: 2,
    category: 'scan',
    title: 'Scan Completed',
    message: 'Scan for acme.com completed successfully with 23 vulnerabilities found',
    level: 'medium',
    createdAt: '2024-12-29T09:00:00Z',
    isRead: false,
  },
  {
    id: 3,
    category: 'vulnerability',
    title: 'High Severity Vulnerability',
    message: 'XSS vulnerability found in acme.com/search',
    level: 'high',
    createdAt: '2024-12-28T16:45:00Z',
    isRead: true,
  },
  {
    id: 4,
    category: 'scan',
    title: 'Scan Failed',
    message: 'Scan for globalfinance.com failed: Connection timeout',
    level: 'high',
    createdAt: '2024-12-28T14:20:00Z',
    isRead: true,
  },
  {
    id: 5,
    category: 'asset',
    title: 'New Subdomains Discovered',
    message: '15 new subdomains discovered for techstart.io',
    level: 'low',
    createdAt: '2024-12-27T11:00:00Z',
    isRead: true,
  },
  {
    id: 6,
    category: 'system',
    title: 'Worker Offline',
    message: 'Worker node worker-03 is now offline',
    level: 'medium',
    createdAt: '2024-12-27T08:30:00Z',
    isRead: true,
  },
  {
    id: 7,
    category: 'scan',
    title: 'Scheduled Scan Started',
    message: 'Scheduled scan for Acme Corporation started',
    level: 'low',
    createdAt: '2024-12-26T06:00:00Z',
    isRead: true,
  },
  {
    id: 8,
    category: 'system',
    title: 'System Update Available',
    message: 'A new version of the scanner is available',
    level: 'low',
    createdAt: '2024-12-25T10:00:00Z',
    isRead: true,
  },
]

export function getMockNotifications(params?: {
  page?: number
  pageSize?: number
  unread?: boolean
}): GetNotificationsResponse {
  const page = params?.page || 1
  const pageSize = params?.pageSize || 10

  let filtered = mockNotifications

  if (params?.unread) {
    filtered = filtered.filter(n => !n.isRead)
  }

  const total = filtered.length
  const totalPages = Math.ceil(total / pageSize)
  const start = (page - 1) * pageSize
  const results = filtered.slice(start, start + pageSize)

  return {
    results,
    total,
    page,
    pageSize,
    totalPages,
  }
}

export function getMockUnreadCount(): { count: number } {
  return {
    count: mockNotifications.filter(n => !n.isRead).length,
  }
}
frontend/mock/data/scheduled-scans.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
import type { ScheduledScan, GetScheduledScansResponse } from '@/types/scheduled-scan.types'

export const mockScheduledScans: ScheduledScan[] = [
  {
    id: 1,
    name: 'Daily Acme Scan',
    engineIds: [1],
    engineNames: ['Full Scan'],
    organizationId: 1,
    organizationName: 'Acme Corporation',
    targetId: null,
    targetName: null,
    scanMode: 'organization',
    cronExpression: '0 2 * * *',
    isEnabled: true,
    nextRunTime: '2024-12-30T02:00:00Z',
    lastRunTime: '2024-12-29T02:00:00Z',
    runCount: 45,
    createdAt: '2024-11-15T08:00:00Z',
    updatedAt: '2024-12-29T02:00:00Z',
  },
  {
    id: 2,
    name: 'Weekly TechStart Vuln Scan',
    engineIds: [3],
    engineNames: ['Vulnerability Only'],
    organizationId: 2,
    organizationName: 'TechStart Inc',
    targetId: null,
    targetName: null,
    scanMode: 'organization',
    cronExpression: '0 3 * * 0',
    isEnabled: true,
    nextRunTime: '2025-01-05T03:00:00Z',
    lastRunTime: '2024-12-29T03:00:00Z',
    runCount: 12,
    createdAt: '2024-10-01T10:00:00Z',
    updatedAt: '2024-12-29T03:00:00Z',
  },
  {
    id: 3,
    name: 'Hourly API Monitoring',
    engineIds: [2],
    engineNames: ['Quick Scan'],
    organizationId: null,
    organizationName: null,
    targetId: 12,
    targetName: 'api.acme.com',
    scanMode: 'target',
    cronExpression: '0 * * * *',
    isEnabled: true,
    nextRunTime: '2024-12-29T12:00:00Z',
    lastRunTime: '2024-12-29T11:00:00Z',
    runCount: 720,
    createdAt: '2024-12-01T00:00:00Z',
    updatedAt: '2024-12-29T11:00:00Z',
  },
  {
    id: 4,
    name: 'Monthly Full Scan - Finance',
    engineIds: [1],
    engineNames: ['Full Scan'],
    organizationId: 3,
    organizationName: 'Global Finance Ltd',
    targetId: null,
    targetName: null,
    scanMode: 'organization',
    cronExpression: '0 0 1 * *',
    isEnabled: false,
    nextRunTime: '2025-01-01T00:00:00Z',
    lastRunTime: '2024-12-01T00:00:00Z',
    runCount: 6,
    createdAt: '2024-06-01T08:00:00Z',
    updatedAt: '2024-12-20T15:00:00Z',
  },
  {
    id: 5,
    name: 'RetailMax Daily Quick',
    engineIds: [2, 3],
    engineNames: ['Quick Scan', 'Vulnerability Only'],
    organizationId: null,
    organizationName: null,
    targetId: 8,
    targetName: 'retailmax.com',
    scanMode: 'target',
    cronExpression: '0 4 * * *',
    isEnabled: true,
    nextRunTime: '2024-12-30T04:00:00Z',
    lastRunTime: '2024-12-29T04:00:00Z',
    runCount: 30,
    createdAt: '2024-11-29T09:00:00Z',
    updatedAt: '2024-12-29T04:00:00Z',
  },
]

export function getMockScheduledScans(params?: {
  page?: number
  pageSize?: number
  search?: string
}): GetScheduledScansResponse {
  const page = params?.page || 1
  const pageSize = params?.pageSize || 10
  const search = params?.search?.toLowerCase() || ''

  let filtered = mockScheduledScans

  if (search) {
    filtered = filtered.filter(
      s =>
        s.name.toLowerCase().includes(search) ||
        s.organizationName?.toLowerCase().includes(search) ||
        s.targetName?.toLowerCase().includes(search)
    )
  }

  const total = filtered.length
  const totalPages = Math.ceil(total / pageSize)
  const start = (page - 1) * pageSize
  const results = filtered.slice(start, start + pageSize)

  return {
    results,
    total,
    page,
    pageSize,
    totalPages,
  }
}

export function getMockScheduledScanById(id: number): ScheduledScan | undefined {
  return mockScheduledScans.find(s => s.id === id)
}
frontend/mock/data/workers.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import type { WorkerNode, WorkersResponse } from '@/types/worker.types'

export const mockWorkers: WorkerNode[] = [
  {
    id: 1,
    name: 'local-worker',
    ipAddress: '127.0.0.1',
    sshPort: 22,
    username: 'root',
    status: 'online',
    isLocal: true,
    createdAt: '2024-01-01T00:00:00Z',
    updatedAt: '2024-12-29T10:00:00Z',
    info: {
      cpuPercent: 23.5,
      memoryPercent: 45.2,
    },
  },
  {
    id: 2,
    name: 'worker-01',
    ipAddress: '192.168.1.101',
    sshPort: 22,
    username: 'scanner',
    status: 'online',
    isLocal: false,
    createdAt: '2024-06-15T08:00:00Z',
    updatedAt: '2024-12-29T09:30:00Z',
    info: {
      cpuPercent: 56.8,
      memoryPercent: 72.1,
    },
  },
  {
    id: 3,
    name: 'worker-02',
    ipAddress: '192.168.1.102',
    sshPort: 22,
    username: 'scanner',
    status: 'online',
    isLocal: false,
    createdAt: '2024-07-20T10:00:00Z',
    updatedAt: '2024-12-29T09:45:00Z',
    info: {
      cpuPercent: 34.2,
      memoryPercent: 58.9,
    },
  },
  {
    id: 4,
    name: 'worker-03',
    ipAddress: '192.168.1.103',
    sshPort: 22,
    username: 'scanner',
    status: 'offline',
    isLocal: false,
    createdAt: '2024-08-10T14:00:00Z',
    updatedAt: '2024-12-28T16:00:00Z',
  },
]

export function getMockWorkers(page = 1, pageSize = 10): WorkersResponse {
  const total = mockWorkers.length
  const totalPages = Math.ceil(total / pageSize)
  const start = (page - 1) * pageSize
  const results = mockWorkers.slice(start, start + pageSize)

  return {
    results,
    total,
    page,
    pageSize,
  }
}

export function getMockWorkerById(id: number): WorkerNode | undefined {
  return mockWorkers.find(w => w.id === id)
}
@@ -69,3 +69,39 @@ export {
  getMockSubdomains,
  getMockSubdomainById,
} from './data/subdomains'

// Auth
export {
  mockUser,
  mockMeResponse,
  mockLoginResponse,
  mockLogoutResponse,
} from './data/auth'

// Engines
export {
  mockEngines,
  getMockEngines,
  getMockEngineById,
} from './data/engines'

// Workers
export {
  mockWorkers,
  getMockWorkers,
  getMockWorkerById,
} from './data/workers'

// Notifications
export {
  mockNotifications,
  getMockNotifications,
  getMockUnreadCount,
} from './data/notifications'

// Scheduled Scans
export {
  mockScheduledScans,
  getMockScheduledScans,
  getMockScheduledScanById,
} from './data/scheduled-scans'
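The service changes below all branch on `USE_MOCK` and call `mockDelay()` before returning fixture data. The actual definitions live in the mock module's entry point, which is not part of this diff; a plausible minimal sketch (the 300 ms delay and the exact env check are assumptions, consistent with the `NEXT_PUBLIC_USE_MOCK` flag set in vercel.json further down) might look like:

```ts
// Hypothetical sketch of the mock toggle used by the service layer below.
// The real frontend/mock/index.ts is not shown in this diff.
export const USE_MOCK = process.env.NEXT_PUBLIC_USE_MOCK === 'true'

export function mockDelay(ms = 300): Promise<void> {
  // Simulate network latency so loading states remain visible in mock mode
  return new Promise(resolve => setTimeout(resolve, ms))
}
```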
@@ -10,11 +10,16 @@ import type {
  ChangePasswordRequest,
  ChangePasswordResponse
} from '@/types/auth.types'
import { USE_MOCK, mockDelay, mockLoginResponse, mockLogoutResponse, mockMeResponse } from '@/mock'

/**
 * User login
 */
export async function login(data: LoginRequest): Promise<LoginResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return mockLoginResponse
  }
  const res = await api.post<LoginResponse>('/auth/login/', data)
  return res.data
}
@@ -23,6 +28,10 @@ export async function login(data: LoginRequest): Promise<LoginResponse> {
 * User logout
 */
export async function logout(): Promise<LogoutResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return mockLogoutResponse
  }
  const res = await api.post<LogoutResponse>('/auth/logout/')
  return res.data
}
@@ -31,6 +40,10 @@ export async function logout(): Promise<LogoutResponse> {
 * Get current user information
 */
export async function getMe(): Promise<MeResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return mockMeResponse
  }
  const res = await api.get<MeResponse>('/auth/me/')
  return res.data
}
@@ -39,6 +52,10 @@ export async function getMe(): Promise<MeResponse> {
 * Change password
 */
export async function changePassword(data: ChangePasswordRequest): Promise<ChangePasswordResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return { message: 'Password changed successfully' }
  }
  const res = await api.post<ChangePasswordResponse>('/auth/change-password/', data)
  return res.data
}
@@ -1,5 +1,6 @@
import apiClient from '@/lib/api-client'
import type { ScanEngine } from '@/types/engine.types'
import { USE_MOCK, mockDelay, getMockEngines, getMockEngineById } from '@/mock'

/**
 * Engine API service
@@ -9,6 +10,10 @@ import type { ScanEngine } from '@/types/engine.types'
 * Get engine list
 */
export async function getEngines(): Promise<ScanEngine[]> {
  if (USE_MOCK) {
    await mockDelay()
    return getMockEngines()
  }
  // Engines are usually not many, get all
  const response = await apiClient.get('/engines/', {
    params: { pageSize: 1000 }
@@ -21,6 +26,12 @@ export async function getEngines(): Promise<ScanEngine[]> {
 * Get engine details
 */
export async function getEngine(id: number): Promise<ScanEngine> {
  if (USE_MOCK) {
    await mockDelay()
    const engine = getMockEngineById(id)
    if (!engine) throw new Error('Engine not found')
    return engine
  }
  const response = await apiClient.get(`/engines/${id}/`)
  return response.data
}
@@ -9,6 +9,7 @@ import type {
  GetNotificationsRequest,
  GetNotificationsResponse,
} from '@/types/notification.types'
import { USE_MOCK, mockDelay, getMockNotifications, getMockUnreadCount } from '@/mock'

export class NotificationService {
  /**
@@ -18,6 +19,10 @@ export class NotificationService {
  static async getNotifications(
    params: GetNotificationsRequest = {}
  ): Promise<GetNotificationsResponse> {
    if (USE_MOCK) {
      await mockDelay()
      return getMockNotifications(params)
    }
    const response = await api.get<GetNotificationsResponse>('/notifications/', {
      params,
    })
@@ -29,6 +34,10 @@ export class NotificationService {
   * Backend returns: { updated: number }
   */
  static async markAllAsRead(): Promise<{ updated: number }> {
    if (USE_MOCK) {
      await mockDelay()
      return { updated: 2 }
    }
    const response = await api.post<{ updated: number }>('/notifications/mark-all-as-read/')
    return response.data
  }
@@ -38,6 +47,10 @@ export class NotificationService {
   * Backend returns: { count: number }
   */
  static async getUnreadCount(): Promise<{ count: number }> {
    if (USE_MOCK) {
      await mockDelay()
      return getMockUnreadCount()
    }
    const response = await api.get<{ count: number }>('/notifications/unread-count/')
    return response.data
  }
@@ -5,11 +5,16 @@ import type {
  CreateScheduledScanRequest,
  UpdateScheduledScanRequest
} from '@/types/scheduled-scan.types'
import { USE_MOCK, mockDelay, getMockScheduledScans, getMockScheduledScanById } from '@/mock'

/**
 * Get scheduled scan list
 */
export async function getScheduledScans(params?: { page?: number; pageSize?: number; search?: string }): Promise<GetScheduledScansResponse> {
  if (USE_MOCK) {
    await mockDelay()
    return getMockScheduledScans(params)
  }
  const res = await api.get<GetScheduledScansResponse>('/scheduled-scans/', { params })
  return res.data
}
@@ -18,6 +23,12 @@ export async function getScheduledScans(params?: { page?: number; pageSize?: num
 * Get scheduled scan details
 */
export async function getScheduledScan(id: number): Promise<ScheduledScan> {
  if (USE_MOCK) {
    await mockDelay()
    const scan = getMockScheduledScanById(id)
    if (!scan) throw new Error('Scheduled scan not found')
    return scan
  }
  const res = await api.get<ScheduledScan>(`/scheduled-scans/${id}/`)
  return res.data
}
@@ -1,5 +1,5 @@
import { api } from "@/lib/api-client"
import type { SearchParams, SearchResponse } from "@/types/search.types"
import type { SearchParams, SearchResponse, AssetType } from "@/types/search.types"

/**
 * Asset search API service
@@ -11,6 +11,10 @@ import type { SearchParams, SearchResponse } from "@/types/search.types"
 * - && joins conditions with AND
 * - || joins conditions with OR
 *
 * Supported asset types:
 * - website: websites (default)
 * - endpoint: endpoints
 *
 * Examples:
 * - host="api" && tech="nginx"
 * - tech="vue" || tech="react"
@@ -25,6 +29,7 @@ export class SearchService {
    const queryParams = new URLSearchParams()

    if (params.q) queryParams.append('q', params.q)
    if (params.asset_type) queryParams.append('asset_type', params.asset_type)
    if (params.page) queryParams.append('page', params.page.toString())
    if (params.pageSize) queryParams.append('pageSize', params.pageSize.toString())

@@ -33,4 +38,38 @@ export class SearchService {
    )
    return response.data
  }

  /**
   * Export search results as CSV
   * GET /api/assets/search/export/
   */
  static async exportCSV(query: string, assetType: AssetType): Promise<void> {
    const queryParams = new URLSearchParams()
    queryParams.append('q', query)
    queryParams.append('asset_type', assetType)

    const response = await api.get(
      `/assets/search/export/?${queryParams.toString()}`,
      { responseType: 'blob' }
    )

    // Read the filename from the response headers
    const contentDisposition = response.headers?.['content-disposition']
    let filename = `search_${assetType}_${new Date().toISOString().slice(0, 10)}.csv`
    if (contentDisposition) {
      const match = contentDisposition.match(/filename="?([^"]+)"?/)
      if (match) filename = match[1]
    }

    // Create a temporary download link
    const blob = new Blob([response.data as BlobPart], { type: 'text/csv;charset=utf-8' })
    const url = URL.createObjectURL(blob)
    const link = document.createElement('a')
    link.href = url
    link.download = filename
    document.body.appendChild(link)
    link.click()
    document.body.removeChild(link)
    URL.revokeObjectURL(url)
  }
}
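As a rough illustration of how the export button in the search UI might drive this service (the calling component is not shown in this diff, so the surrounding names are assumptions; only `SearchService.exportCSV(query, assetType)` comes from the code above):

```ts
// Hypothetical caller: export the same FOFA-style query shown in the README examples.
import { SearchService } from "@/services/search.service" // import path assumed

async function exportNginxApis(): Promise<void> {
  const query = 'host="api" && tech="nginx"'
  // exportCSV() builds the request, reads the filename header, and triggers the browser download
  await SearchService.exportCSV(query, 'website')
}
```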
@@ -9,6 +9,7 @@ import type {
  CreateWorkerRequest,
  UpdateWorkerRequest,
} from '@/types/worker.types'
import { USE_MOCK, mockDelay, getMockWorkers, getMockWorkerById } from '@/mock'

const BASE_URL = '/workers'

@@ -17,6 +18,10 @@ export const workerService = {
   * Get Worker list
   */
  async getWorkers(page = 1, pageSize = 10): Promise<WorkersResponse> {
    if (USE_MOCK) {
      await mockDelay()
      return getMockWorkers(page, pageSize)
    }
    const response = await apiClient.get<WorkersResponse>(
      `${BASE_URL}/?page=${page}&page_size=${pageSize}`
    )
@@ -27,6 +32,12 @@ export const workerService = {
   * Get single Worker details
   */
  async getWorker(id: number): Promise<WorkerNode> {
    if (USE_MOCK) {
      await mockDelay()
      const worker = getMockWorkerById(id)
      if (!worker) throw new Error('Worker not found')
      return worker
    }
    const response = await apiClient.get<WorkerNode>(`${BASE_URL}/${id}/`)
    return response.data
  },
@@ -1,15 +1,49 @@
// Search result types
export interface SearchResult {
// Asset type
export type AssetType = 'website' | 'endpoint'

// Website search result
export interface WebsiteSearchResult {
  id: number
  url: string
  host: string
  title: string
  technologies: string[]
  statusCode: number | null
  contentLength: number | null
  contentType: string
  webserver: string
  location: string
  vhost: boolean | null
  responseHeaders: Record<string, string>
  responseBody: string
  createdAt: string | null
  targetId: number
  vulnerabilities: Vulnerability[]
}

// Endpoint search result
export interface EndpointSearchResult {
  id: number
  url: string
  host: string
  title: string
  technologies: string[]
  statusCode: number | null
  contentLength: number | null
  contentType: string
  webserver: string
  location: string
  vhost: boolean | null
  responseHeaders: Record<string, string>
  responseBody: string
  createdAt: string | null
  targetId: number
  matchedGfPatterns: string[]
}

// Shared result union (kept for compatibility with older code)
export type SearchResult = WebsiteSearchResult | EndpointSearchResult

export interface Vulnerability {
  id?: number
  name: string
@@ -28,6 +62,7 @@ export interface SearchResponse {
  page: number
  pageSize: number
  totalPages: number
  assetType: AssetType
}

// Search operator types
@@ -49,6 +84,7 @@ export interface SearchExpression {
// Search parameters sent to the backend
export interface SearchParams {
  q?: string // full search expression string
  asset_type?: AssetType // asset type
  page?: number
  pageSize?: number
}
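The card component earlier in this change calls an `isWebsiteResult()` guard to decide whether to render the vulnerability section; its implementation is not included in the diff. Given the union above, a plausible sketch (purely illustrative, not the project's actual helper) is:

```ts
// Hypothetical type guards distinguishing the two result shapes.
import type { SearchResult, WebsiteSearchResult, EndpointSearchResult } from "@/types/search.types"

export function isWebsiteResult(result: SearchResult): result is WebsiteSearchResult {
  // Only Website results carry a vulnerabilities array
  return 'vulnerabilities' in result
}

export function isEndpointResult(result: SearchResult): result is EndpointSearchResult {
  // Only Endpoint results carry matched GF patterns
  return 'matchedGfPatterns' in result
}
```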
@@ -4,6 +4,7 @@
  "buildCommand": "pnpm build",
  "installCommand": "pnpm install",
  "env": {
    "NEXT_PUBLIC_USE_MOCK": "true"
    "NEXT_PUBLIC_USE_MOCK": "true",
    "NEXT_PUBLIC_SKIP_AUTH": "true"
  }
}
install.sh (114 lines changed)
@@ -42,49 +42,61 @@ CYAN='\033[0;36m'
BOLD='\033[1m'
RESET='\033[0m'

# ==============================================================================
# Additional color definitions
# ==============================================================================
MAGENTA='\033[0;35m'
DIM='\033[2m'
BG_BLUE='\033[44m'
BG_CYAN='\033[46m'

# ==============================================================================
# Logging functions
# ==============================================================================
info() {
echo -e "${BLUE}[INFO]${RESET} $1"
echo -e " ${CYAN}▸${RESET} $1"
}

success() {
echo -e "${GREEN}[OK]${RESET} $1"
echo -e " ${GREEN}✔${RESET} $1"
}

warn() {
echo -e "${YELLOW}[WARN]${RESET} $1"
echo -e " ${YELLOW}⚠${RESET} $1"
}

error() {
echo -e "${RED}[ERROR]${RESET} $1"
echo -e " ${RED}✖${RESET} $1"
}

step() {
echo -e "\n${BOLD}${CYAN}>>> $1${RESET}"
echo -e "\n${BOLD}${CYAN}┌── $1${RESET}"
}

header() {
echo -e "${BOLD}${BLUE}============================================================${RESET}"
echo -e "${BOLD}${BLUE} $1${RESET}"
echo -e "${BOLD}${BLUE}============================================================${RESET}"
echo -e ""
echo -e "${BOLD}${BLUE}╔══════════════════════════════════════════════════════════╗${RESET}"
echo -e "${BOLD}${BLUE}║${RESET} $1"
echo -e "${BOLD}${BLUE}╚══════════════════════════════════════════════════════════╝${RESET}"
}

# ==============================================================================
# Show banner
# ==============================================================================
show_banner() {
echo -e "${CYAN}"
cat << 'EOF'
__ __ _ ____ _
\ \/ /(_)_ __ __ _| _ \(_)_ __
\ / | | '_ \ / _` | |_) | | '_ \
/ \ | | | | | (_| | _ <| | | | |
/_/\_\|_|_| |_|\__, |_| \_\_|_| |_|
|___/
EOF
echo -e "${RESET}"
clear
echo -e ""
echo -e "${CYAN}${BOLD} ██╗ ██╗██╗███╗ ██╗ ██████╗ ██████╗ ██╗███╗ ██╗${RESET}"
echo -e "${CYAN} ╚██╗██╔╝██║████╗ ██║██╔════╝ ██╔══██╗██║████╗ ██║${RESET}"
echo -e "${BLUE}${BOLD} ╚███╔╝ ██║██╔██╗ ██║██║ ███╗██████╔╝██║██╔██╗ ██║${RESET}"
echo -e "${BLUE} ██╔██╗ ██║██║╚██╗██║██║ ██║██╔══██╗██║██║╚██╗██║${RESET}"
echo -e "${MAGENTA}${BOLD} ██╔╝ ██╗██║██║ ╚████║╚██████╔╝██║ ██║██║██║ ╚████║${RESET}"
echo -e "${MAGENTA} ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝${RESET}"
echo -e ""
echo -e "${DIM} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
echo -e "${BOLD} 🔒 分布式安全扫描平台 │ 一键部署 (Ubuntu)${RESET}"
echo -e "${DIM} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
echo -e ""
}

# ==============================================================================
@@ -120,7 +132,6 @@ fi

# Show the title
show_banner
header "XingRin 一键安装脚本 (Ubuntu)"
info "当前用户: ${BOLD}$REAL_USER${RESET}"
info "项目路径: ${BOLD}$ROOT_DIR${RESET}"
info "安装版本: ${BOLD}$APP_VERSION${RESET}"
@@ -312,8 +323,22 @@ check_pg_ivm() {
# Show the installation summary
show_summary() {
echo
if [ "$1" == "success" ]; then
header "服务已成功启动!"
if [ "$1" = "success" ]; then
# Success banner
echo -e ""
echo -e "${GREEN}${BOLD} ╔═══════════════════════════════════════════════════╗${RESET}"
echo -e "${GREEN}${BOLD} ║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ██████╗ ██████╗ ███╗ ██╗███████╗██║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ██╔══██╗██╔═══██╗████╗ ██║██╔════╝██║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ██║ ██║██║ ██║██╔██╗ ██║█████╗ ██║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ██║ ██║██║ ██║██║╚██╗██║██╔══╝ ╚═╝ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ██████╔╝╚██████╔╝██║ ╚████║███████╗██║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝╚═╝ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ✨ XingRin 平台部署成功! ║${RESET}"
echo -e "${GREEN}${BOLD} ║ ║${RESET}"
echo -e "${GREEN}${BOLD} ╚═══════════════════════════════════════════════════╝${RESET}"
echo -e ""
else
header "安装完成 Summary"
fi
@@ -324,12 +349,11 @@ show_summary() {
DB_USER=$(grep "^DB_USER=" "$DOCKER_DIR/.env" | cut -d= -f2)
DB_PASSWORD=$(grep "^DB_PASSWORD=" "$DOCKER_DIR/.env" | cut -d= -f2)

echo -e "${YELLOW}数据库配置:${RESET}"
echo -e "------------------------------------------------------------"
echo -e " 服务器地址: ${DB_HOST:-未知}"
echo -e " 用户名: ${DB_USER:-未知}"
echo -e " 密码: ${DB_PASSWORD:-未知}"
echo -e "------------------------------------------------------------"
echo -e "${DIM} ──────────────────────────────────────────────────────${RESET}"
echo -e " ${YELLOW}🗄 数据库配置${RESET}"
echo -e " ${DIM}├─${RESET} 服务器地址: ${BOLD}${DB_HOST:-未知}${RESET}"
echo -e " ${DIM}├─${RESET} 用户名: ${BOLD}${DB_USER:-未知}${RESET}"
echo -e " ${DIM}└─${RESET} 密码: ${BOLD}${DB_PASSWORD:-未知}${RESET}"
echo
fi

@@ -341,27 +365,33 @@ show_summary() {
ACCESS_HOST="localhost"
fi

echo -e "${GREEN}访问地址:${RESET}"
printf " %-16s %s\n" "XingRin:" "https://${ACCESS_HOST}:8083/"
echo -e "${DIM} ──────────────────────────────────────────────────────${RESET}"
echo -e " ${GREEN}🌐 访问地址${RESET}"
echo -e " ${DIM}└─${RESET} XingRin: ${BOLD}${CYAN}https://${ACCESS_HOST}:8083/${RESET}"
echo

echo -e "${YELLOW}默认登录账号:${RESET}"
printf " %-16s %s\n" "用户名:" "admin"
printf " %-16s %s\n" "密码:" "admin"
echo -e "${YELLOW} [!] 请首次登录后修改密码!${RESET}"
echo -e "${DIM} ──────────────────────────────────────────────────────${RESET}"
echo -e " ${MAGENTA}🔑 默认登录${RESET}"
echo -e " ${DIM}├─${RESET} 用户名: ${BOLD}admin${RESET}"
echo -e " ${DIM}└─${RESET} 密码: ${BOLD}admin${RESET}"
echo -e " ${YELLOW} ⚠ 请首次登录后修改密码!${RESET}"
echo

if [ "$1" != "success" ]; then
echo -e "${GREEN}后续启动命令:${RESET}"
echo -e " ./start.sh # 启动所有服务"
echo -e " ./start.sh --no-frontend # 只启动后端"
echo -e " ./stop.sh # 停止所有服务"
if [ "$1" = "success" ]; then
: # Success mode: skip the follow-up commands
else
echo -e "${DIM} ──────────────────────────────────────────────────────${RESET}"
echo -e " ${BLUE}🚀 后续命令${RESET}"
echo -e " ${DIM}├─${RESET} ./start.sh ${DIM}# 启动所有服务${RESET}"
echo -e " ${DIM}├─${RESET} ./start.sh --no-frontend ${DIM}# 只启动后端${RESET}"
echo -e " ${DIM}└─${RESET} ./stop.sh ${DIM}# 停止所有服务${RESET}"
echo
fi

echo -e "${YELLOW}[!] 云服务器某些厂商默认开启了安全策略(阿里云/腾讯云/华为云等):${RESET}"
echo -e " 端口未放行可能导致无法访问或无法扫描,强烈推荐用国外vps,或者在云控制台放行:"
echo -e " ${RESET}8083, 5432"
echo -e "${DIM} ──────────────────────────────────────────────────────${RESET}"
echo -e " ${YELLOW}⚠ 云服务器端口提醒${RESET}"
echo -e " ${DIM}└─${RESET} 某些厂商默认开启安全策略(阿里云/腾讯云/华为云)"
echo -e " 端口未放行可能导致无法访问,请在云控制台放行: ${BOLD}8083, 5432${RESET}"
echo
}

@@ -714,7 +744,7 @@ fi
# Start services
# ==============================================================================
step "正在启动服务..."
"$ROOT_DIR/start.sh" $START_ARGS
"$ROOT_DIR/start.sh" ${START_ARGS} --quiet

# ==============================================================================
# Final summary