Skip to content

Commit

Permalink
1.add save share file to netdisk
Browse files Browse the repository at this point in the history
2.add docs
  • Loading branch information
ghjkg546 committed Nov 8, 2024
1 parent 994692d commit 318e3ff
Show file tree
Hide file tree
Showing 27 changed files with 1,200 additions and 4 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,4 @@
.vscode
/vendor/
/storage/
/config.yaml
66 changes: 64 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,65 @@
# go-blog
# 一站式夸克网盘管理后台

## 简介
近来,自媒体平台上涌现出许多给夸克网盘引流的博主。本项目是一个夸克网盘资源管理后台,方便博主们建立自己的资源站。

包含了资源抓取、资源转存、批量创建资源分享并生成 Web 页面、Telegram 搜索等功能。因为是业余时间开发的,目前有诸多不足之处,会慢慢完善。

## 使用方法

1.将 config_example.yaml 重命名为 config.yaml,并填入你自己的数据库配置
2.编译go二进制文件,并上传到你的服务器

```
go build -o main main.go
chmod +x main
./main
```
访问localhost:8080,即可看到前台页面

3.编译后台文件,进入 https://gitee.com/ghjkg546/ziyuan 里的element-admin目录
```
pnpm install
pnpm build
```

得到dist目录上传服务器

4.如果要支持搜索功能,需要配置 zincsearch。使用以下 docker-compose 配置并执行 docker-compose up -d
```
version: '3.8'
services:
zincsearch:
image: public.ecr.aws/zinclabs/zincsearch:latest
container_name: zincsearch
ports:
- "4080:4080"
environment:
- ZINC_DATA_PATH=/data
- ZINC_FIRST_ADMIN_USER=admin222
- ZINC_FIRST_ADMIN_PASSWORD=bbb222
volumes:
- ./data:/data
```
访问 ip:4080,打开 zincsearch 后台
创建索引教程
https://blog.csdn.net/lzcs1/article/details/143569012?spm=1001.2014.3001.5502

zincsearch 相关配置需要填入 config.yaml
search:
url: 'ip:4080'
user_name: '101111' #后台用户的id
password: 'cccdd111'

## 📑 详细功能目录
- [抓取资源](./docs/1_爬虫抓取文件.md)
- [转存资源](./docs/2_转存资源到你的网盘.md)
- [分享资源](./docs/3_分享资源.md)
- [资源列表](./docs/4_资源列表.md)





gentool -dsn "root:root@tcp(localhost:3306)/go-test?charset=utf8mb4&parseTime=True&loc=Local" -tables "sys_dict_item"
11 changes: 11 additions & 0 deletions app/common/request/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,17 @@ type BatchSave struct {
CategoryId uint `form:"category_id" json:"category_id"`
}

// Crawl holds the parameters for a page-scraping request: the page to
// fetch and the regex rules used to extract data from its HTML.
type Crawl struct {
	DetailUrl string `form:"detail_url" json:"detail_url"` // URL of the detail page to fetch
	NameRule  string `form:"name_rule" json:"name_rule"`   // regex with one capture group for the resource name
	LinkRule  string `form:"link_rule" json:"link_rule"`   // regex with one capture group for the share link
}

// TransSave is the payload for batch-transferring shared resources
// into the caller's own netdisk.
type TransSave struct {
	Ids string `form:"ids" json:"ids"` // comma-separated crawl-item IDs to transfer
	Fid string `form:"fid" json:"fid"` // destination folder id in the user's netdisk
}

type BatchShare struct {
PageSize int `form:"page_size" json:"page_size"`
Fid string `form:"fid" json:"fid"`
Expand Down
300 changes: 300 additions & 0 deletions app/controllers/adminapi/crawl.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,300 @@
package adminapi

import (
"encoding/json"
"fmt"
"github.com/gin-gonic/gin"
"github.com/jassue/jassue-gin/app/common/request"
"github.com/jassue/jassue-gin/app/common/response"
"github.com/jassue/jassue-gin/app/models"
"github.com/jassue/jassue-gin/app/services"
"github.com/jassue/jassue-gin/global"
"net/http"
"strconv"
"strings"
"time"
)

type CrawlController struct{}

// GetList returns a paginated list of crawled items, optionally filtered
// by keyword (matched with LIKE against the mobile or name columns).
//
// Query params: pageNum (default 1), pageSize (default 10), keyword.
// Responds with {"list": [...], "total": n}.
func (uc *CrawlController) GetList(c *gin.Context) {
	pageStr := c.DefaultQuery("pageNum", "1")
	pageSizeStr := c.DefaultQuery("pageSize", "10")
	keyword := c.DefaultQuery("keyword", "")

	page, err := strconv.Atoi(pageStr)
	if err != nil || page < 1 {
		page = 1
	}
	pageSize, err := strconv.Atoi(pageSizeStr)
	if err != nil || pageSize < 1 {
		pageSize = 10
	}
	offset := (page - 1) * pageSize

	var items []models.CrawlItem
	var total int64
	query := global.App.DB.Model(models.CrawlItem{})
	if keyword != "" {
		// Bug fix: gorm chain methods return a new *gorm.DB; without
		// reassignment the keyword condition could be silently dropped.
		query = query.Where("mobile LIKE ?", "%"+keyword+"%").Or("name LIKE ?", "%"+keyword+"%")
	}

	query.Count(&total).Limit(pageSize).Offset(offset).Order("id desc").Find(&items)

	response.Success(c, gin.H{
		"list":  items,
		"total": total,
	})
}

// // Create handles POST requests to create a new user
func (uc *CrawlController) Create(c *gin.Context) {
var input models.ResourceItem
db := global.App.DB
// Bind JSON payload to input
if err := c.BindJSON(&input); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
input.Views = 0
// Save data to database
result := db.Save(&input)
if result.Error != nil {
response.Fail(c, 500, result.Error.Error())

return
}
//index := "resource_item" // string | Index
err1 := json.Unmarshal([]byte(input.DiskItems), &input.DiskItemsArray)
if err1 != nil {
fmt.Println("Error decoding JSON:", err1)
return
}
}

// Batch save to my netdisk
func (uc *CrawlController) BatchSaveToDisk(c *gin.Context) {

var input request.TransSave

// Bind JSON payload to input
if err := c.BindJSON(&input); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
// 拆分字符串为切片
idsSlice := strings.Split(input.Ids, ",")

// 遍历切片并将字符串转换为整数
var ids []int
for _, idStr := range idsSlice {
id, err := strconv.Atoi(idStr)
if err != nil {
fmt.Printf("转换错误: %s\n", err)
continue
}
ids = append(ids, id)
}
// 使用 GORM 删除对应 ID 的记录
var items []models.CrawlItem
if len(ids) > 0 {
// 这里使用 IN 查询来批量删除这些 ID
global.App.DB.Where("id IN ?", ids).Find(&items)
for _, item := range items {
services.QuarkService.SaveShare(input.Fid, item.Url)
time.Sleep(3 * time.Second)
}

}

response.Success(c, nil)
}

// Delete handles DELETE requests to delete a user
func (uc *CrawlController) Delete(c *gin.Context) {
idsString := c.Param("id")
// 拆分字符串为切片
idsSlice := strings.Split(idsString, ",")
// 遍历切片并将字符串转换为整数
var ids []int
for _, idStr := range idsSlice {
id, err := strconv.Atoi(idStr)
if err != nil {
fmt.Printf("转换错误: %s\n", err)
continue
}
ids = append(ids, id)
}
// 使用 GORM 删除对应 ID 的记录
if len(ids) > 0 {
// 这里使用 IN 查询来批量删除这些 ID
global.App.DB.Where("id IN ?", ids).Delete(models.CrawlItem{})
}
response.Success(c, nil)
}

//
//// 等待分享列表
//func (uc *CrawlController) WaitShareList(c *gin.Context) {
// pageStr := c.DefaultQuery("pageNum", "1")
// pageSizeStr := c.DefaultQuery("pageSize", "10")
// fidStr := c.DefaultQuery("fid", "")
//
// page, err := strconv.Atoi(pageStr)
// if err != nil || page < 1 {
// fmt.Println(err)
// page = 1
// }
//
// pageSize, err := strconv.Atoi(pageSizeStr)
// if err != nil || pageSize < 1 {
// pageSize = 10
// }
// var dirResp response.DirResponse
// dirResp = services.QuarkService.GetDirInfo(fidStr, page, pageSize)
//
// var res = gin.H{
// "list": dirResp.Data,
// "total": dirResp.Total,
// }
// response.Success(c, res)
//}
//
//
//// 抓取详情页并按正则提取资源名称和链接(原注释“等待分享列表”系复制粘贴错误)
//
// var input request.Crawl
// //db := global.App.DB
// if err := c.BindJSON(&input); err != nil {
// c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
// return
// }
// fmt.Println(input)
// //
// //params := request.Crawl{
// // DetailUrl: "https://example.com",
// // NameRule: `<h1>(.*?)</h1>`, // Example regex for name
// // LinkRule: `href="(https://example.com.*?)"`, // Example regex for link
// //}
//
// name, link, err := CrawlHTML(input)
// if err != nil {
// fmt.Println("Error:", err)
// return
// }
//
// fmt.Println("Name:", name)
// fmt.Println("Link:", link)
// //return
// //page, err := strconv.Atoi(pageStr)
// //if err != nil || page < 1 {
// // fmt.Println(err)
// // page = 1
// //}
// //
// //pageSize, err := strconv.Atoi(pageSizeStr)
// //if err != nil || pageSize < 1 {
// // pageSize = 10
// //}
// //var dirResp response.DirResponse
// //dirResp = services.QuarkService.GetDirInfo(fidStr, page, pageSize)
// //
// //var res = gin.H{
// // "list": dirResp.Data,
// // "total": dirResp.Total,
// //}
// response.Success(c, gin.H{})
//}
//
//func CrawlHTML(params request.Crawl) (string, string, error) {
// // Step 1: Fetch HTML content from the URL
// resp, err := http.Get(params.DetailUrl)
// if err != nil {
// return "", "", fmt.Errorf("failed to fetch URL: %w", err)
// }
// defer resp.Body.Close()
//
// htmlData, err := ioutil.ReadAll(resp.Body)
// if err != nil {
// return "", "", fmt.Errorf("failed to read HTML content: %w", err)
// }
//
// // Step 2: Extract name and link using regex patterns
// nameRegex := regexp.MustCompile(params.NameRule)
// linkRegex := regexp.MustCompile(params.LinkRule)
//
// nameMatch := nameRegex.FindStringSubmatch(string(htmlData))
// linkMatch := linkRegex.FindStringSubmatch(string(htmlData))
//
// // Check if matches were found
// if len(nameMatch) < 2 || len(linkMatch) < 2 {
// return "", "", fmt.Errorf("failed to find matches with the provided regex rules")
// }
//
// // Return matched results
// return nameMatch[1], linkMatch[1], nil
//}
//
//func calculatePages(totalItems, itemsPerPage int) int {
// return int(math.Ceil(float64(totalItems) / float64(itemsPerPage)))
//}
//
//// 批量分享
//func (uc *CrawlController) BatchShare(c *gin.Context) {
// var input request.BatchShare
//
// if err := c.BindJSON(&input); err != nil {
// c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
// return
// }
// var dirResp response.DirResponse
// dirResp = services.QuarkService.GetDirInfo(input.Fid, 1, 50)
// response.Success(c, nil)
// var ids []string
// if input.PageSize >= dirResp.Total {
// var chunks []models.ShareItem
// for _, item := range dirResp.Data {
//
// ids = append(ids, item.Fid)
//
// chunks = append(chunks, models.ShareItem{Name: item.FileName, ID: item.Fid})
// }
// services.QuarkService.SaveResouceByUrl(ids, "test", chunks, input.CategoryId)
// ids = []string{} // Clear the ids slice
// chunks = []models.ShareItem{}
//
// } else {
// pages := calculatePages(dirResp.Total, 50)
// for i := 0; i < pages; i++ {
// dirResp = services.QuarkService.GetDirInfo(input.Fid, i+1, 50)
// fmt.Printf("Processing page %d\n", i+1)
// var chunks []models.ShareItem
// for _, item := range dirResp.Data {
//
// ids = append(ids, item.Fid)
//
// chunks = append(chunks, models.ShareItem{Name: item.FileName, ID: item.Fid})
//
// if len(ids) >= input.PageSize {
//
// services.QuarkService.SaveResouceByUrl(ids, item.FileName, chunks, input.CategoryId)
// ids = []string{} // Clear the ids slice
// chunks = []models.ShareItem{}
// time.Sleep(5 * time.Second)
// }
// }
// }
//
// }
//
// response.Success(c, nil)
//}
Loading

0 comments on commit 318e3ff

Please sign in to comment.