DirSearch is an open-source tool written in Python for discovering common sensitive files and directories in web applications. In this article, we walk through a DirSearch-style scanner implemented in Go.
To use DirSearch-Go, first install Go, then download and install the tool with the following command (on Go 1.17 and later, go install github.com/evilscott/dirsearch-go@latest is the preferred form):
go get github.com/evilscott/dirsearch-go
Using DirSearch-Go is straightforward:
dirsearch-go -u https://example.com -e php,html -w common.txt -t 50
The command above runs DirSearch-Go against https://example.com with 50 threads, searching only for files with the .php and .html extensions, using the wordlist file common.txt.
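The wordlist is a plain text file with one candidate name per line; every entry is combined with every configured extension to form the URLs to probe. A minimal common.txt might look like this (the entries are purely illustrative):

admin
login
backup
config
uploads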
For more options and usage details, see the command-line help:
dirsearch-go -h
// dirsearch-go: a simple scanner for common sensitive files and directories.
package main

import (
	"flag"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"sync"
	"time"
)
var (
	// Target URL
	targetURL string
	// Extra request headers
	headers []string
	// Cookie value
	cookie string
	// File extensions to append to each word
	extensions string
	// Wordlist file path
	wordlist string
	// Number of concurrent workers
	threads int
	// Output log file
	logfile string
	// Optional proxy server URL
	proxy string
	// Comma-separated status codes to report
	statusCodes string
)
func init() {
	flag.StringVar(&targetURL, "u", "", "Target URL (required)")
	flag.StringVar(&cookie, "c", "", "Cookie value")
	flag.StringVar(&extensions, "e", "php,html", "File extensions to search")
	flag.StringVar(&wordlist, "w", "", "Wordlist file path (required)")
	flag.IntVar(&threads, "t", 20, "Number of threads")
	flag.StringVar(&logfile, "o", "", "Output log file path")
	flag.StringVar(&proxy, "p", "", "Proxy server URL")
	flag.StringVar(&statusCodes, "s", "200,301,302,403,500", "Status codes to filter")
	flag.Parse()
	// Any remaining positional arguments are treated as extra
	// request headers in "Key: Value" form.
	headers = append(headers, flag.Args()...)
}
func main() {
	// The target URL and the wordlist are both required.
	if targetURL == "" || wordlist == "" {
		flag.Usage()
		return
	}
	// Optionally open the output log file.
	var writer *LogWriter
	if logfile != "" {
		w, err := NewLogWriter(logfile)
		if err != nil {
			fmt.Println("Error:", err)
			return
		}
		writer = w
		defer writer.Close()
	}
	// Build the HTTP client (with optional proxy support).
	httpClient := HttpClient(proxy)
	// Load the wordlist.
	words, err := LoadWordlist(wordlist)
	if err != nil {
		fmt.Println("Error:", err)
		return
	}
	// Generate candidate URLs and scan them with a pool of workers.
	var wg sync.WaitGroup
	urls := BuildUrls(targetURL, words, extensions)
	chUrls := make(chan string)
	chResults := make(chan *HttpResponse)
	for i := 0; i < threads; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for url := range chUrls {
				chResults <- NewHttpRequest(httpClient, url, headers, cookie)
			}
		}()
	}
	// Feed the candidate URLs to the workers.
	go func() {
		for _, url := range urls {
			chUrls <- url
		}
		close(chUrls)
	}()
	// Close the results channel once all workers have finished,
	// so the range loop below can terminate.
	go func() {
		wg.Wait()
		close(chResults)
	}()
	results := make([]*HttpResponse, 0)
	for result := range chResults {
		if filterStatusCodes(statusCodes, result.StatusCode) {
			results = append(results, result)
			fmt.Println(result.Url, "-", result.StatusCode)
			if writer != nil {
				writer.Write(result.Url, result.StatusCode)
			}
		}
	}
}
// BuildUrls combines every word with every extension to build the URL list.
func BuildUrls(baseURL string, wordlist []string, extensions string) []string {
	baseURL = strings.TrimSuffix(baseURL, "/")
	urls := make([]string, 0)
	for _, word := range wordlist {
		for _, ext := range strings.Split(extensions, ",") {
			urls = append(urls, fmt.Sprintf("%s/%s.%s", baseURL, word, strings.TrimSpace(ext)))
		}
	}
	return urls
}
// LoadWordlist reads the wordlist file and returns its non-empty lines.
func LoadWordlist(wordlist string) ([]string, error) {
	data, err := os.ReadFile(wordlist)
	if err != nil {
		return nil, err
	}
	words := make([]string, 0)
	for _, line := range strings.Split(string(data), "\n") {
		line = strings.TrimSpace(line)
		if line != "" {
			words = append(words, line)
		}
	}
	return words, nil
}
// filterStatusCodes reports whether a status code should be shown. Prefix
// matching is used, so "-s 2" matches any 2xx response while "-s 200"
// matches only 200.
func filterStatusCodes(codes string, statusCode int) bool {
	for _, code := range strings.Split(codes, ",") {
		code = strings.TrimSpace(code)
		if code != "" && strings.HasPrefix(fmt.Sprintf("%d", statusCode), code) {
			return true
		}
	}
	return false
}
// HttpClient builds an HTTP client, optionally routed through a proxy.
func HttpClient(proxy string) *http.Client {
	transport := &http.Transport{}
	if proxy != "" {
		if proxyURL, err := url.Parse(proxy); err == nil {
			transport.Proxy = http.ProxyURL(proxyURL)
		}
	}
	return &http.Client{
		Timeout:   10 * time.Second,
		Transport: transport,
	}
}
// NewHttpRequest issues a GET request and returns the URL with its status
// code (0 is used when the request could not be built or sent).
func NewHttpRequest(client *http.Client, url string, headers []string, cookie string) *HttpResponse {
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return &HttpResponse{Url: url, StatusCode: 0}
	}
	for _, header := range headers {
		// Split only on the first colon so header values may contain ":".
		kv := strings.SplitN(header, ":", 2)
		if len(kv) < 2 {
			continue
		}
		key := strings.TrimSpace(kv[0])
		value := strings.TrimSpace(kv[1])
		if key != "" && value != "" {
			req.Header.Add(key, value)
		}
	}
	if cookie != "" {
		req.Header.Set("Cookie", cookie)
	}
	resp, err := client.Do(req)
	if err != nil {
		return &HttpResponse{Url: url, StatusCode: 0}
	}
	defer resp.Body.Close()
	return &HttpResponse{Url: url, StatusCode: resp.StatusCode}
}
// HttpResponse records the scan result for a single URL.
type HttpResponse struct {
	Url        string
	StatusCode int
}

// LogWriter appends scan results to a log file.
type LogWriter struct {
	filename string
	file     *os.File
}

func NewLogWriter(filename string) (*LogWriter, error) {
	file, err := os.Create(filename)
	if err != nil {
		return nil, err
	}
	return &LogWriter{filename, file}, nil
}

func (w *LogWriter) Write(url string, statusCode int) {
	line := fmt.Sprintf("%s - %d\n", url, statusCode)
	w.file.WriteString(line)
}

func (w *LogWriter) Close() {
	w.file.Close()
}
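Assuming the listing above is saved as main.go in its own directory, a local build and test run might look like this (the module name and target are placeholders; only the standard library is used, so no extra dependencies are required):

go mod init dirsearch-go
go build -o dirsearch-go .
./dirsearch-go -u https://example.com -w common.txt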
DirSearch-Go is a sensitive file and directory scanner written in Go. Beyond basic scanning, it supports custom HTTP request headers and cookies, concurrent scanning with a configurable number of workers, an optional proxy, status-code filtering, and writing results to a log file. Because it is written in Go, it performs well and remains stable, which makes it suitable for large-scale scans.
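For example, a fuller invocation exercising these options might look like the following (the target, proxy address, and file names are placeholders); note that any trailing arguments are collected by init() as extra "Key: Value" request headers:

dirsearch-go -u https://example.com -w common.txt -t 50 -c "PHPSESSID=abc123" -p http://127.0.0.1:8080 -o results.log -s 200,403 "User-Agent: dirsearch-go" "X-Forwarded-For: 127.0.0.1"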