Skip to content

Commit

Permalink
Merge pull request YaoApp#198 from trheyi/main
Browse files Browse the repository at this point in the history
[add] table export unit-test
  • Loading branch information
trheyi authored Oct 13, 2022
2 parents 7cbf655 + 05caf7b commit d71443f
Show file tree
Hide file tree
Showing 6 changed files with 131 additions and 1 deletion.
21 changes: 21 additions & 0 deletions table/process.go
Original file line number Diff line number Diff line change
Expand Up @@ -344,6 +344,7 @@ func ProcessSelect(process *gou.Process) interface{} {
// ProcessExport xiang.table.Export (:table, :queryParam, :chunkSize)
// Export query result to Excel
func ProcessExport(process *gou.Process) interface{} {
// var testDataM = map[string]string{}

debug := os.Getenv("YAO_EXPORT_DEBUG") != ""
process.ValidateArgNums(1)
Expand Down Expand Up @@ -374,6 +375,8 @@ func ProcessExport(process *gou.Process) interface{} {
pagesize = process.ArgsInt(3, api.DefaultInt(1))
}

// fmt.Printf("\n%d/%d\n", page, pagesize)

if debug {
bytes, _ := jsoniter.Marshal(param)
log.Info("[Export] %s %s %d %d Params: %s", api.Process, filename, page, pagesize, string(bytes))
Expand All @@ -385,6 +388,10 @@ func ProcessExport(process *gou.Process) interface{} {
WithSID(process.Sid).
Run()

// for i, v := range response.(maps.MapStrAny)["data"].([]maps.MapStrAny) {
// fmt.Println("i=", i, "id=", v["id"], v["status"])
// }

if debug {
bytes, _ := jsoniter.Marshal(response)
log.Info("[Export] %s %d %d Prepare: %s", filename, page, pagesize, string(bytes))
Expand All @@ -398,6 +405,20 @@ func ProcessExport(process *gou.Process) interface{} {
log.Info("[Export] %s %d %d Prepare After: %s", filename, page, pagesize, string(bytes))
}

// utils.Dump(respAfterHook)
// for _, v := range respAfterHook.(map[string]interface{})["data"].([]interface{}) {
// name := v.(map[string]interface{})["name"].(string)
// if _, has := testDataM[name]; has {
// fmt.Printf("**** %s Has\n\n", testDataM[name])
// } else {
// testDataM[name] = fmt.Sprintf("%s %d/%d", name, page, pagesize)
// }
// fmt.Println("i=", i, "id=", v.(map[string]interface{})["id"], v.(map[string]interface{})["status"])
// }

// fmt.Println("---")
// fmt.Println("")

res, ok := respAfterHook.(map[string]interface{})
if !ok {
res, ok = respAfterHook.(maps.MapStrAny)
Expand Down
40 changes: 40 additions & 0 deletions table/process_test.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package table

import (
"fmt"
"path/filepath"
"testing"

Expand All @@ -15,6 +16,7 @@ import (
_ "github.com/yaoapp/yao/helper"
"github.com/yaoapp/yao/model"
"github.com/yaoapp/yao/query"
"github.com/yaoapp/yao/script"
"github.com/yaoapp/yao/share"
)

Expand All @@ -23,6 +25,7 @@ func init() {
model.Load(config.Conf)
share.Load(config.Conf)
query.Load(config.Conf)
script.Load(config.Conf)
flow.LoadFrom(filepath.Join(config.Conf.Root, "flows", "hooks"), "hooks.")
Load(config.Conf)
}
Expand Down Expand Up @@ -507,3 +510,40 @@ func TestTableProcessExportWithHook(t *testing.T) {
// assert.Equal(t, 2, res.Get("pagesize"))
// assert.Equal(t, float64(100), res.Get("after"))
}

// TestTableProcessExportWithScriptHook runs xiang.table.Export against the
// "hooks.search_script" table, whose before:search / after:search hooks are
// implemented in scripts.service (see tests/scripts/service.js).
//
// It only asserts that the export process completes and returns a non-nil
// response; the exported spreadsheet content itself is not inspected here.
// (Removed: a block of commented-out debug asserts and a dead testData()
// call that had been left in from development.)
func TestTableProcessExportWithScriptHook(t *testing.T) {
	args := []interface{}{"hooks.search_script"}
	response := gou.NewProcess("xiang.table.Export", args...).Run()
	assert.NotNil(t, response)
}

// testData resets the service table and seeds it with 2000 deterministic
// rows (NAME-0 … NAME-1999, kind_id=1, manu_id=1) for export pagination
// tests. It panics on insert failure so a broken fixture fails fast instead
// of letting tests run against partial data.
func testData() {
	m := gou.Select("service")

	// Build all seed rows up front; the final length is known, so pre-size.
	data := make([][]interface{}, 0, 2000)
	for i := 0; i < 2000; i++ {
		data = append(data, []interface{}{fmt.Sprintf("NAME-%d", i), 1, 1})
	}

	// Clear any existing rows so the fixture is idempotent.
	// NOTE(review): DestroyWhere's return values are ignored here — confirm
	// that dropping cleanup errors is acceptable for this fixture.
	m.DestroyWhere(gou.QueryParam{
		Wheres: []gou.QueryWhere{
			{Column: "id", OP: "ge", Value: 0},
		},
	})

	// Previously the error was printed unconditionally (even when nil) and
	// then ignored; a seeding failure must abort the test run.
	if err := m.Insert([]string{"name", "kind_id", "manu_id"}, data); err != nil {
		panic(fmt.Sprintf("testData: seeding service table: %v", err))
	}
}
2 changes: 1 addition & 1 deletion table/table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ func check(t *testing.T) {
for key := range Tables {
keys = append(keys, key)
}
assert.Equal(t, 10, len(keys))
assert.Equal(t, 11, len(keys))

demo := Select("demo")
assert.NotNil(t, demo.Columns["類型"])
Expand Down
9 changes: 9 additions & 0 deletions tests/backup/paginate_test.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@

## ERROR
select `service`.`id` as `service_id`, `service`.`short_name` as `service_short_name`, `service`.`name` as `service_name`, `service`.`city` as `service_city`, `service`.`kind_id` as `service_kind_id`, `service`.`manu_id` as `service_manu_id`, `service`.`registed_at` as `service_registed_at`, `service`.`link` as `service_link`, `service`.`contact_name` as `service_contact_name`, `service`.`contact_mobile` as `service_contact_mobile`, `service`.`fields` as `service_fields`, `service`.`price_options` as `service_price_options`, `service`.`industries` as `service_industries`, `service`.`rank` as `service_rank`, `service`.`status` as `service_status`, `service`.`deleted_at` as `service_deleted_at`, `service`.`created_at` as `service_created_at`, `service`.`updated_at` as `service_updated_at` from `service` as `service` where `service`.`deleted_at` is null order by `service`.`created_at` limit 1 offset 907
select `service`.`id` as `service_id`, `service`.`short_name` as `service_short_name`, `service`.`name` as `service_name`, `service`.`city` as `service_city`, `service`.`kind_id` as `service_kind_id`, `service`.`manu_id` as `service_manu_id`, `service`.`registed_at` as `service_registed_at`, `service`.`link` as `service_link`, `service`.`contact_name` as `service_contact_name`, `service`.`contact_mobile` as `service_contact_mobile`, `service`.`fields` as `service_fields`, `service`.`price_options` as `service_price_options`, `service`.`industries` as `service_industries`, `service`.`rank` as `service_rank`, `service`.`status` as `service_status`, `service`.`deleted_at` as `service_deleted_at`, `service`.`created_at` as `service_created_at`, `service`.`updated_at` as `service_updated_at` from `service` as `service` where `service`.`deleted_at` is null order by `service`.`created_at` limit 1 offset 1740


## CORRECT
select `service`.`id` as `service_id`, `service`.`short_name` as `service_short_name`, `service`.`name` as `service_name`, `service`.`city` as `service_city`, `service`.`kind_id` as `service_kind_id`, `service`.`manu_id` as `service_manu_id`, `service`.`registed_at` as `service_registed_at`, `service`.`link` as `service_link`, `service`.`contact_name` as `service_contact_name`, `service`.`contact_mobile` as `service_contact_mobile`, `service`.`fields` as `service_fields`, `service`.`price_options` as `service_price_options`, `service`.`industries` as `service_industries`, `service`.`rank` as `service_rank`, `service`.`status` as `service_status`, `service`.`deleted_at` as `service_deleted_at`, `service`.`created_at` as `service_created_at`, `service`.`updated_at` as `service_updated_at` from `service` as `service` where `service`.`deleted_at` is null order by `service`.`created_at`, `service`.`id` limit 1 offset 907
select `service`.`id` as `service_id`, `service`.`short_name` as `service_short_name`, `service`.`name` as `service_name`, `service`.`city` as `service_city`, `service`.`kind_id` as `service_kind_id`, `service`.`manu_id` as `service_manu_id`, `service`.`registed_at` as `service_registed_at`, `service`.`link` as `service_link`, `service`.`contact_name` as `service_contact_name`, `service`.`contact_mobile` as `service_contact_mobile`, `service`.`fields` as `service_fields`, `service`.`price_options` as `service_price_options`, `service`.`industries` as `service_industries`, `service`.`rank` as `service_rank`, `service`.`status` as `service_status`, `service`.`deleted_at` as `service_deleted_at`, `service`.`created_at` as `service_created_at`, `service`.`updated_at` as `service_updated_at` from `service` as `service` where `service`.`deleted_at` is null order by `service`.`created_at`, `service`.`id` limit 1 offset 1740
17 changes: 17 additions & 0 deletions tests/scripts/service.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
// BeforeSearch hook: forwards the search arguments to the engine untouched.
// Returns [param, page, pagesize] exactly as received.
function BeforeSearch(param, page, pagesize) {
  var passthrough = [param, page, pagesize];
  return passthrough;
}

// AfterSearch hook for table search results.
// Prefixes each row's `city` field with its id and returns the page payload.
//
// data: the paginated search result (object with a `data` array plus paging
//       fields); may be null/undefined, in which case an empty object with
//       an empty `data` array is returned.
function AfterSearch(data) {
  var newData = data || {};
  // Fixed: read rows from the guarded object — the original `data.data`
  // threw a TypeError whenever the hook received null/undefined.
  var items = newData.data || [];
  for (var i in items) {
    items[i]["city"] = `ID: ${items[i]["id"]} 城市: ${items[i]["city"]} `;
    // Look up the full record; the result is unused here, but the call is
    // kept so the hook still exercises Process() from script code.
    var res = Process("models.service.Find", items[i]["id"], {});
  }
  newData["data"] = items;
  return newData;
}
43 changes: 43 additions & 0 deletions tests/tables/hooks/search_script.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
{
"name": "云服务库",
"version": "1.0.0",
"decription": "云服务库",
"bind": {
"model": "service",
"withs": {}
},
"hooks": {
"before:search": "scripts.service.BeforeSearch",
"after:search": "scripts.service.AfterSearch"
},
"apis": {
"search": {
"process": "models.service.Paginate",
"guard": "-",
"default": [{ "orders": [{ "column": "id" }] }, null, 15]
}
},
"columns": {},
"filters": {},
"list": {
"primary": "id",
"layout": {
"columns": [
{ "name": "id", "width": 6 },
{ "name": "服务名称" },
{ "name": "城市" }
],
"filters": []
},
"actions": {}
},
"edit": {
"primary": "id",
"layout": {
"fieldset": [{ "columns": [{ "name": "id", "width": 6 }] }]
},
"actions": { "cancel": {} }
},
"insert": {},
"view": {}
}

0 comments on commit d71443f

Please sign in to comment.