Skip to content

Commit

Permalink
Merge pull request #4 from N0el4kLs/dev
Browse files Browse the repository at this point in the history
Dev
  • Loading branch information
N0el4kLs authored Aug 15, 2024
2 parents 6753bf7 + c603589 commit f2fb19e
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 21 deletions.
17 changes: 15 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -123,11 +123,24 @@ cp .env.example .env

## 参考

[Packer-Fuzzer-一款针对Webpack等前端打包工具所构造的网站进行快速、高效安全检测的扫描工具
](https://github.com/rtcatc/Packer-Fuzzer)
[Packer-Fuzzer-一款针对Webpack等前端打包工具所构造的网站进行快速、高效安全检测的扫描工具](https://github.com/rtcatc/Packer-Fuzzer)

[vueinfo-Extract info from Vue based website](https://github.com/phith0n/vueinfo)

## 前端调试

收集一些前端调试、解密、逆向、API参数构造等相关的渗透测试文章:

[基于Vue开发的前端系统中寻找后端API及其参数](https://xz.aliyun.com/t/14686?time__1311=GqAhYKBKAIqjx05DKA4YuOODgmRoGOfeD)

[基于未授权的渗透测试技巧总结](https://forum.butian.net/share/3086)

[记一次前端断点调试到管理员登陆](https://forum.butian.net/share/3652)

[JS逆向,前端加密暴力破解(小白无痛学习)](https://forum.butian.net/share/3647)




**免责声明: 本工具仅供教育和授权测试目的使用。开发者及贡献者不支持、不鼓励也不赞成任何非法或未经授权的使用。
用户有责任确保其使用本工具的行为符合所有适用的法律法规。严禁将本工具用于任何未经授权的活动。
Expand Down
41 changes: 22 additions & 19 deletions pkg/headless/crawler.go
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,7 @@ func (c *Crawler) GetAllVueRouters(t *types.Task) (*types.Task, *rod.Page) {
if strings.Contains(path, "*") || strings.Contains(path, ":") {
continue
}
if strings.HasPrefix(path, "/") {
path = strings.TrimPrefix(path, "/")
}
path = strings.TrimPrefix(path, "/")
router := fmt.Sprintf("%s%s", baseURL, path)
tmp = append(tmp, router)
}
Expand Down Expand Up @@ -196,12 +194,8 @@ func (c *Crawler) findBaseURL(p *rod.Page) string {
src, exists := s.Attr("src")
if exists {
if strings.HasSuffix(src, ".js") && !strings.HasPrefix(src, "http") {
if strings.HasPrefix(src, "./") {
src = strings.TrimPrefix(src, "./")
}
if strings.HasPrefix(src, "/") {
src = strings.TrimPrefix(src, "/")
}
src = strings.TrimLeft(src, "./")
src = strings.TrimPrefix(src, "/")
firstScriptSrc = src
return
}
Expand Down Expand Up @@ -339,9 +333,7 @@ func tokenizerURL(s string) (string, string) {
baseURI string
uriToken string
)
if strings.Contains(s, "%2F") {
s = strings.Replace(s, "%2F", "/", -1)
}
s = strings.Replace(s, "%2F", "/", -1)

if indexFrag := strings.IndexAny(s, "/#/"); indexFrag != -1 {
cleanURI = strings.Replace(s, "/#/", "/", 1)
Expand All @@ -354,13 +346,24 @@ func tokenizerURL(s string) (string, string) {
return "", ""
}
queries := u.Query()
if len(queries) > 0 && queries.Get("redirect") != "" {
redirect = queries.Get("redirect")
st := strings.Replace(s, redirect, REDIRECT, 1)

index := strings.Index(st, "?")
baseURI = st[:index]
uriToken = st
if len(queries) > 0 {
// check if the url contains redirect fields
// Todo maybe there are more conditions to redirect, need to find a flexible way to check and handle it
if queries.Get("redirect") != "" {
redirect = queries.Get("redirect")
st := strings.Replace(s, redirect, REDIRECT, 1)

index := strings.Index(st, "?")
baseURI = st[:index]
uriToken = st
}
if queries.Get("redirect_url") != "" {
redirect = queries.Get("redirect_url")
st := strings.Replace(s, redirect, REDIRECT, 1)
index := strings.Index(st, "?")
baseURI = st[:index]
uriToken = st
}
} else {
if index := strings.Index(s, "?"); index != -1 {
baseURI = s[:index]
Expand Down

0 comments on commit f2fb19e

Please sign in to comment.