feat(update): sync with project latest

🐟 2023-05-13 00:55:47 +08:00
parent 10b803dfde
commit 4d7c7df9cf
64 changed files with 1353 additions and 355 deletions

api/require/common.go (new file)
@@ -0,0 +1,26 @@
package require
type CommonPaginationReq struct {
Page int `form:"page" json:"page" swaggerignore:"true" `
Size int `form:"size" json:"size" swaggerignore:"true" `
}
func (req CommonPaginationReq) ToParam() *PaginationParam {
p := new(PaginationParam)
p.Size = req.Size
p.Page = req.Page
return p
}
type PaginationParam struct {
CommonPaginationReq
// rsp total page_size
Total int64
}
func (p *PaginationParam) Enable() bool {
return p.Page > 0 && p.Size > 0
}
func (p *PaginationParam) Offset() int {
return (p.Page - 1) * p.Size
}
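A minimal usage sketch of the pagination helpers above (not part of this commit; the import path follows the demo-server module declared in go.mod further down):

package main

import (
    "fmt"

    "demo-server/api/require"
)

func main() {
    // In a real handler Page/Size would be bound from the request; hard-coded here for illustration.
    req := require.CommonPaginationReq{Page: 3, Size: 20}
    p := req.ToParam()
    if p.Enable() { // paginate only when both page and size are positive
        fmt.Printf("LIMIT %d OFFSET %d\n", p.Size, p.Offset()) // LIMIT 20 OFFSET 40
    }
}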

api/response/common.go (new file)
@@ -0,0 +1,19 @@
package response
import (
"demo-server/api/require"
)
type CommonPaginationRsp[Item any] struct {
List []Item `json:"list"`
Total int64 `json:"total"` // total count
Page int `json:"page"` // page number
Size int `json:"page_size"` // page size
}
func (rsp *CommonPaginationRsp[Item]) SetResult(list []Item, pagination *require.PaginationParam) {
rsp.List = list
rsp.Page = pagination.Page
rsp.Total = pagination.Total
rsp.Size = pagination.Size
}
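Continuing the sketch above, a handler could fill the generic response like this (hypothetical values, not part of this commit):

p := req.ToParam()
p.Total = 42 // normally set by the data layer
rsp := new(response.CommonPaginationRsp[string])
rsp.SetResult([]string{"a", "b"}, p)
// rsp now carries list, total, page and page_size ready for JSON serialization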

api/response/response.go (new file)
@@ -0,0 +1,25 @@
package response
type Response struct {
// status code
// * 0 SUCCESS
// * 2 WARN warning
// * 5 INPUT_ERROR input error
// * 7 ERROR execution error
Code int `json:"code"`
Msg string `json:"msg"` // response message
Data any `json:"data"` // response data
}
const (
SUCCESS = 0
WARN = 2
INPUT_ERROR = 5
ERROR = 7
)
const (
MsgSuccess = "success. "
MsgError = "server internal error. "
MsgInputError = "client require error. "
)
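The struct and code constants invite small constructors; a hedged sketch of one such helper (not defined by this commit):

// Success wraps data in a Response with the SUCCESS code (hypothetical helper).
func Success(data any) Response {
    return Response{Code: SUCCESS, Msg: MsgSuccess, Data: data}
}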

@@ -1,4 +1,4 @@
-package cmd
+package init
 import (
 "fmt"
@@ -7,6 +7,12 @@ import (
 "runtime"
 )
+
+var cfgPath string
+
+func SetCfg(cfg string) {
+cfgPath = cfg
+}
 /*
 the init package handles the project initialization flow, including:
@@ -22,15 +28,22 @@ import (
 //
 // a func call returning an error means the program should stop running
 func InitFundamental() error {
-return mustSuccess(InitConfig, InitLogger)
+return mustSuccess(initConfigWarpDefault, initLogger)
 }
 func InitServer() error {
-return mustSuccess()
+return mustSuccess(
+initConfigWarpDefault,
+initLogger,
+//initFileSystem,
+//initDB,
+//initDBTables,
+//initService,
+)
 }
 var (
-// ErrorInitFundamental mean that InitFundamental() return error then program continue running!
+// ErrorInitFundamental mean that initFundamental() return error then program continue running!
 ErrorInitFundamental = errors.New("app fundamental init error! ")
 )

cmd/init/conf.go (new file)
@@ -0,0 +1,116 @@
package init
import (
"demo-server/internal/config"
"demo-server/internal/model"
"demo-server/pkg/str"
"encoding/json"
"fmt"
"github.com/fsnotify/fsnotify"
"github.com/spf13/viper"
"go.uber.org/zap"
"os"
"strings"
)
func initConfigWarpDefault() (err error) {
if err = initConfig(); err == nil {
return
}
println("[warning] boot from [" + cfgPath + "] err: " + err.Error() + ", system will retry default config")
const defaultConf = "./default/application.yaml"
cfgPath = defaultConf
if err = initConfig(); err == nil {
defer func() {
if r := recover(); r != nil {
zap.L().Error("init panic", zap.Any("recover", r))
}
}()
}
return
}
func initConfig() (err error) {
if cfgPath != "" {
// Use config file from the flag.
viper.AddConfigPath(cfgPath)
} else {
var workdir string
// Find workdir.
workdir, err = os.Getwd()
if err != nil {
return err
}
// set default config file $PWD/conf/application.yml
viper.AddConfigPath(workdir + model.ConfigFilePath)
}
viper.SetConfigType(model.ConfigType)
viper.SetConfigName(model.ConfigName)
configDefault()
configWatch()
viper.AutomaticEnv() // read in environment variables that match
// If a config file is found, read it in.
if err = viper.ReadInConfig(); err != nil {
return
}
if err = viper.Unmarshal(config.Get()); err != nil {
return
}
cfgData, _ := json.Marshal(config.Get())
println("read config ->", str.B2S(cfgData))
svr := config.Get().Svr
if strings.HasPrefix(svr.Address, ":") {
svr.Address = "0.0.0.0" + svr.Address
}
return err
}
func configDefault() {
// // server
// viper.SetDefault("server.address", model.DefaultServerAddress)
//
// // file
// viper.SetDefault("file.root", model.DefaultFSRoot)
// viper.SetDefault("file.prefix_upload", model.DefaultFSUploadPrefix)
//
// // db
// viper.SetDefault("db.type", db.TypeSQLITE)
// viper.SetDefault("db.dsn", model.DefaultDBDSN)
// viper.SetDefault("db.log_level", model.DefaultDBLogLevel)
//
// // kafka
// viper.SetDefault("kafka.producer.batch_size", model.DefaultKafkaBatchSize)
// viper.SetDefault("kafka.producer.batch_bytes", model.DefaultKafkaBatchByte)
// viper.SetDefault("kafka.chunk_size", model.DefaultKafkaChunkSize)
// viper.SetDefault("kafka.block_size", model.DefaultKafkaBlockSize)
//
// viper.SetDefault("prometheus.push_gateway", model.DefaultPromPushGateway)
// viper.SetDefault("prometheus.interval", model.DefaultPromInterval)
//
// viper.SetDefault("async.offline_interval", model.DefaultOfflineInterval)
// viper.SetDefault("async.offline_threshold", model.DefaultOfflineThreshold)
//
}
func configWatch() {
viper.OnConfigChange(func(in fsnotify.Event) {
eventString := fmt.Sprintf("%s -> `%s`", in.Op, in.Name)
zap.L().Info("config file change", zap.String("event", eventString))
if err := viper.Unmarshal(config.Get()); err != nil {
zap.L().Info("save config err!", zap.Error(err))
}
//server.GetSvr().Dispatch(model.GetConfig())
})
viper.WatchConfig()
}

cmd/init/db.go (new file)
@@ -0,0 +1,77 @@
package init
//import (
// "btdp-agent-admin/internal/config"
// "btdp-agent-admin/internal/dao"
// "btdp-agent-admin/internal/dao/db"
// "btdp-agent-admin/internal/dao/group"
// "btdp-agent-admin/internal/dao/instance"
// "btdp-agent-admin/internal/dao/log"
// "btdp-agent-admin/internal/model"
//)
//
//var (
// d *daoPolymer
//)
//
//func getDaoPolymer() dao.Interface {
// return d
//}
//
//type daoPolymer struct {
// cli *db.Cli
// group group.Dao
// agent instance.Dao
// log log.Dao
//}
//
//func initDaoPolymer(cli *db.Cli) {
// d = &daoPolymer{
// cli: cli,
// group: group.New(cli),
// agent: instance.New(cli),
// log: log.New(cli),
// }
//}
//
//func (d *daoPolymer) GetGroupDao() group.Dao {
// return d.group
//}
//
//func (d *daoPolymer) GetInstanceDao() instance.Dao {
// return d.agent
//}
//
//func (d *daoPolymer) GetLogDao() log.Dao {
// return d.log
//}
//
//// Initialize the database; the DAO aggregate is obtained via getDaoPolymer
//func initDB() (err error) {
// cfg := config.Get()
//
// if cfg == nil || cfg.DB == nil {
// return ErrorInitFundamental
// }
//
// var (
// cli *db.Cli
// cfgDB = cfg.DB
// )
//
// if cli, err = db.New(cfgDB); err != nil {
// return
// }
//
// initDaoPolymer(cli)
// return
//}
//
//func initDBTables() (err error) {
// orm := d.cli.Orm()
// err = orm.Exec(model.DBSQLInit).Error
// if config.Get().DB.Type == model.DBTypeSQLITE {
// orm.Exec(model.DBSQLConfigSQLite)
// }
// return
//}

@@ -1,11 +1,11 @@
-package cmd
+package init
 import (
+"demo-server/internal/config"
+"demo-server/internal/config/logger"
 "fmt"
 "github.com/spf13/viper"
 "os"
-"packet-ui/internal/model"
-"packet-ui/internal/model/logger"
 "go.uber.org/zap"
 "go.uber.org/zap/zapcore"
@@ -22,8 +22,8 @@ logger usage guidelines
 - warn: non-critical errors after which the flow can continue
 */
-func InitLogger() error {
-cfg := model.GetConfig().Log
+func initLogger() error {
+cfg := config.Get().Log
 if cfg == nil {
 return ErrorInitFundamental
 }

@@ -1,85 +0,0 @@
package cmd
import (
"encoding/json"
"fmt"
"github.com/fsnotify/fsnotify"
"github.com/spf13/viper"
"go.uber.org/zap"
"os"
"packet-ui/internal/model"
"packet-ui/internal/server"
"packet-ui/pkg"
"packet-ui/pkg/db"
"strings"
)
func InitConfig() (err error) {
if cfgFile != "" {
// Use config file from the flag.
viper.SetConfigFile(cfgFile)
} else {
var workdir string
// Find workdir.
workdir, err = os.Getwd()
if err != nil {
return err
}
// set default config file $PWD/conf/application.yml
viper.AddConfigPath(workdir + "/conf")
viper.SetConfigType("yaml")
viper.SetConfigName("application")
}
configDefault()
configWatch()
viper.AutomaticEnv() // read in environment variables that match
// If a config file is found, read it in.
if err = viper.ReadInConfig(); err != nil {
return
}
if err = viper.Unmarshal(model.GetConfig()); err != nil {
return
}
cfgData, _ := json.Marshal(model.GetConfig())
println("config", pkg.B2S(cfgData))
svr := model.GetConfig().Svr
if strings.HasPrefix(svr.Address, ":") {
svr.Address = "0.0.0.0" + svr.Address
}
//defer func() {
// if e := viper.WriteConfig(); e != nil {
// zap.L().Warn("save config err!", zap.Error(e))
// }
//}()
return err
}
func configDefault() {
// db
viper.SetDefault("db.type", db.TypeSQLITE)
viper.SetDefault("db.dsn", "./packet-ui.db")
// file
viper.SetDefault("file.data", "./data/root")
// viper.
}
func configWatch() {
viper.OnConfigChange(func(in fsnotify.Event) {
eventString := fmt.Sprintf("%s -> `%s`", in.Op, in.Name)
zap.L().Info("config file change", zap.String("event", eventString))
if err := viper.Unmarshal(model.GetConfig()); err != nil {
zap.L().Info("save config err!", zap.Error(err))
}
server.GetSvr().Dispatch(model.GetConfig())
})
viper.WatchConfig()
}

@@ -2,13 +2,13 @@ package cmd
 import (
 "context"
+"demo-server/internal/model"
+"demo-server/internal/server"
 "github.com/spf13/cobra"
 "go.uber.org/zap"
 "net/http"
 "os"
 "os/signal"
-"packet-ui/internal/model"
-"packet-ui/internal/server"
 "syscall"
 )
@@ -57,7 +57,7 @@ func runHttpServer() {
 }
 }()
-config := model.GetConfig()
+config := config.GetConfig()
 if config == nil || config.Svr == nil {
 zap.S().Panic("server config nil!", ErrorInitFundamental)
 }

go.mod
@@ -1,4 +1,4 @@
-module packet-ui
+module demo-server
 go 1.19

@@ -1,8 +1,8 @@
-package model
+package config
 import (
-"packet-ui/internal/model/logger"
-"packet-ui/internal/model/server"
+"demo-server/internal/config/logger"
+"demo-server/internal/config/server"
 )
 type Configuration struct {

@@ -1,4 +1,4 @@
-package model
+package config
 var (
 // Conf is the global configuration, referenced via config.Conf
@@ -7,7 +7,7 @@ var (
 //DataDBCli *db.Client
 )
-func GetConfig() *Configuration {
+func Get() *Configuration {
 return Conf
 }

@@ -0,0 +1,23 @@
package middleware
import (
"github.com/gin-gonic/gin"
"net/http"
)
func Cors() gin.HandlerFunc {
return func(c *gin.Context) {
method := c.Request.Method
c.Header("Access-Control-Allow-Origin", "http://localhost:520") // 可将将 * 替换为指定的域名
c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE, UPDATE")
c.Header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Authorization")
c.Header("Access-Control-Expose-Headers", "Content-Length, Access-Control-Allow-Origin, Access-Control-Allow-Headers, Cache-Control, Content-Language, Content-Type")
c.Header("Access-Control-Allow-Credentials", "true")
if method == "OPTIONS" {
c.AbortWithStatus(http.StatusNoContent)
}
c.Next()
}
}

internal/model/const.go (new file)
@@ -0,0 +1,40 @@
package model
const (
_ = 1 << (10 * iota) // ignore first value by assigning to blank identifier
KB // 1024
MB // 1024 * KB
GB // 1024 * MB
TB // 1024 * GB
PB // 1024 * TB
EB // 1024 * PB
ZB // 1024 * EB
YB // 1024 * ZB
)
const (
GroupIDLength = 4
GroupDefaultName = "default"
GroupBroadcastName = "all"
)
const (
LogCreateInBatchSize = 100
)
const (
// ListDefaultBufferSize is the default size for list queries
ListDefaultBufferSize = 8
ListDefaultLimitSize = 100000
)
const (
ConfigFilePath = "./conf"
ConfigFileName = ConfigName + "." + ConfigType
ConfigType = "yaml"
ConfigName = "application"
)
const (
FileFormKey = "file"
)

internal/model/db-init.sql (new file)
@@ -0,0 +1,156 @@
CREATE TABLE if not exists "agent_admin_group_all"
(
id text primary key unique,
name text, -- 'group name'
data blob, -- 'config file data'
created_at datetime, -- 'creation time'
updated_at datetime, -- 'update time'
deleted_at datetime, -- 'soft-delete flag'
check (length('name') > 0)
);
CREATE TABLE if not exists "agent_admin_instance_all"
(
id text primary key unique,
gid integer references agent_admin_group_all (id),
ip text, -- 'ip the instance sends messages from' // needs an index
topic text, -- 'instance communication topic'
version text, -- 'instance version'
hostname text, -- 'instance hostname' // needs an index
state text, -- 'instance state' // needs an index
switch text, -- 'instance on/off switch' // needs an index
data blob, -- 'config file data'
cpu_agent int, -- 'CPU used by the agent'
cpu_total int, -- 'CPU used on the machine'
cpu_maximum int, -- 'total CPU available on the machine'
mem_agent int, -- 'memory used by the agent, in bytes'
mem_total int, -- 'memory used on the machine, in bytes'
mem_maximum int, -- 'total memory available on the machine, in bytes'
bandwidth_agent_in int, -- 'inbound bandwidth used by the agent'
bandwidth_total_in int, -- 'inbound bandwidth used on the machine'
bandwidth_agent_out int, -- 'outbound bandwidth used by the agent'
bandwidth_total_out int, -- 'outbound bandwidth used on the machine'
latest_reported_at datetime, -- 'last report time of the instance'
created_at datetime, -- 'creation time'
updated_at datetime, -- 'update time'
deleted_at datetime, -- 'soft-delete flag'
CHECK (state in ('running', 'stopped', 'upgrading', 'offline', 'upgrade_failed')),
CHECK (switch in ('on', 'off'))
);
CREATE INDEX if not exists index_ins_gid_refer on agent_admin_instance_all (gid);
CREATE INDEX if not exists index_ins_state_idx on agent_admin_instance_all (state);
CREATE INDEX if not exists index_ins_switch_idx on agent_admin_instance_all (switch);
CREATE INDEX if not exists index_ins_hostname_idx on agent_admin_instance_all (hostname);
CREATE INDEX if not exists index_ins_cpu_agent_idx on agent_admin_instance_all (cpu_agent);
CREATE INDEX if not exists index_ins_cpu_total_idx on agent_admin_instance_all (cpu_total);
CREATE INDEX if not exists index_ins_cpu_maximum_idx on agent_admin_instance_all (cpu_maximum);
CREATE INDEX if not exists index_ins_mem_agent_idx on agent_admin_instance_all (mem_agent);
CREATE INDEX if not exists index_ins_mem_total_idx on agent_admin_instance_all (mem_total);
CREATE INDEX if not exists index_ins_mem_maximum_idx on agent_admin_instance_all (mem_maximum);
CREATE INDEX if not exists index_ins_bandwidth_agent_in_idx on agent_admin_instance_all (bandwidth_agent_in);
CREATE INDEX if not exists index_ins_bandwidth_total_in_idx on agent_admin_instance_all (bandwidth_total_in);
CREATE INDEX if not exists index_ins_bandwidth_agent_out_idx on agent_admin_instance_all (bandwidth_agent_out);
CREATE INDEX if not exists index_ins_bandwidth_total_out_idx on agent_admin_instance_all (bandwidth_total_out);
CREATE TABLE if not exists "agent_admin_log_all"
(
id text, -- references agent_admin_instance_all (id)
created_at datetime, -- 'log time (timestamp)'
deleted_at datetime, -- 'soft-delete flag'
type text, -- 'instance state at the time of the log entry'
operator text, -- 'operator who made the change'
message text, -- 'additional log message'
CHECK (type in
('action_up', 'action_down', 'action_config', 'action_upgrade', 'action_join', 'stopped', 'upgrading',
'upgrade_success', 'upgrade_failed', 'offline', 'running', 'register'))
);
-- insert default record
INSERT OR IGNORE INTO agent_admin_group_all (id, name, data, created_at, updated_at, deleted_at)
VALUES ('default', '默认分组', null, datetime('now'), datetime('now'), null);
CREATE TABLE if not exists "agent_admin_group_all"
(
id text primary key unique,
name text, -- 'group name'
data blob, -- 'config file data'
created_at datetime, -- 'creation time'
updated_at datetime, -- 'update time'
deleted_at datetime, -- 'soft-delete flag'
check (length('name') > 0)
);
CREATE TABLE if not exists "agent_admin_instance_all"
(
id text primary key unique,
gid integer references agent_admin_group_all (id),
ip text, -- 'ip the instance sends messages from' // needs an index
topic text, -- 'instance communication topic'
hostname text, -- 'instance hostname' // needs an index
state text, -- 'instance state' // needs an index
switch text, -- 'instance on/off switch' // needs an index
data blob, -- 'config file data'
cpu_agent int, -- 'CPU used by the agent'
cpu_total int, -- 'CPU used on the machine'
cpu_maximum int, -- 'total CPU available on the machine'
mem_agent int, -- 'memory used by the agent, in bytes'
mem_total int, -- 'memory used on the machine, in bytes'
mem_maximum int, -- 'total memory available on the machine, in bytes'
bandwidth_agent_in int, -- 'inbound bandwidth used by the agent'
bandwidth_total_in int, -- 'inbound bandwidth used on the machine'
bandwidth_agent_out int, -- 'outbound bandwidth used by the agent'
bandwidth_total_out int, -- 'outbound bandwidth used on the machine'
created_at datetime, -- 'creation time'
latest_reported_at datetime, -- 'time of the latest metric update'
updated_at datetime, -- 'update time'
deleted_at datetime, -- 'soft-delete flag'
CHECK (state in ('running', 'stopped', 'upgrading', 'offline', 'upgrade_failed')),
CHECK (switch in ('on', 'off'))
);
CREATE INDEX if not exists index_ins_gid_refer on agent_admin_instance_all (gid);
CREATE INDEX if not exists index_ins_state_idx on agent_admin_instance_all (state);
CREATE INDEX if not exists index_ins_switch_idx on agent_admin_instance_all (switch);
CREATE INDEX if not exists index_ins_hostname_idx on agent_admin_instance_all (hostname);
CREATE INDEX if not exists index_ins_cpu_agent_idx on agent_admin_instance_all (cpu_agent);
CREATE INDEX if not exists index_ins_cpu_total_idx on agent_admin_instance_all (cpu_total);
CREATE INDEX if not exists index_ins_cpu_maximum_idx on agent_admin_instance_all (cpu_maximum);
CREATE INDEX if not exists index_ins_mem_agent_idx on agent_admin_instance_all (mem_agent);
CREATE INDEX if not exists index_ins_mem_total_idx on agent_admin_instance_all (mem_total);
CREATE INDEX if not exists index_ins_mem_maximum_idx on agent_admin_instance_all (mem_maximum);
CREATE INDEX if not exists index_ins_bandwidth_agent_in_idx on agent_admin_instance_all (bandwidth_agent_in);
CREATE INDEX if not exists index_ins_bandwidth_total_in_idx on agent_admin_instance_all (bandwidth_total_in);
CREATE INDEX if not exists index_ins_bandwidth_agent_out_idx on agent_admin_instance_all (bandwidth_agent_out);
CREATE INDEX if not exists index_ins_bandwidth_total_out_idx on agent_admin_instance_all (bandwidth_total_out);
CREATE TABLE if not exists "agent_admin_log_all"
(
id text, -- references agent_admin_instance_all (id)
created_at datetime, -- 'log time (timestamp)'
deleted_at datetime, -- 'soft-delete flag'
type text, -- 'instance state at the time of the log entry'
operator text, -- 'operator who made the change'
message text, -- 'additional log message'
CHECK (type in
('action_up', 'action_down', 'action_config', 'action_upgrade', 'action_join', 'stopped', 'upgrading',
'upgrade_success', 'upgrade_failed', 'offline', 'running', 'register'))
);
-- insert default record
INSERT OR IGNORE INTO agent_admin_group_all (id, name, data, created_at, updated_at, deleted_at)
VALUES ('default', '默认分组', null, datetime('now'), datetime('now'), null);
-- count
SELECT count(*) as 'cnt0'
FROM `agent_admin_instance_all`
WHERE gid = 'default'
AND `agent_admin_instance_all`.`deleted_at` IS NULL
LIMIT 10 OFFSET 0;
SELECT count(*) as 'cnt1'
FROM `agent_admin_instance_all`
WHERE gid = 'default'
AND `agent_admin_instance_all`.`deleted_at` IS NULL
LIMIT 10 OFFSET 1;
SELECT count(*) as 'cnt10'
FROM `agent_admin_instance_all`
WHERE gid = 'default'
AND `agent_admin_instance_all`.`deleted_at` IS NULL
LIMIT 10 OFFSET 10;

@@ -0,0 +1,10 @@
-- Set the cache size:
-- 设置缓存大小:
PRAGMA cache_size = 10000;
-- Set the page size:
-- 设置页面大小:
PRAGMA page_size = 4096;
-- Enable Write-Ahead Logging (WAL):
-- 启用预写日志 (WAL):
PRAGMA journal_mode = WAL;

internal/model/db.go (new file)
@@ -0,0 +1,17 @@
package model
import (
_ "embed"
)
const (
DBTypeSQLITE = "sqlite"
DBTypeMYSQL = "mysql"
)
var (
//go:embed db-init.sql
DBSQLInit string
//go:embed db-sqlite.sql
DBSQLConfigSQLite string
)

@@ -1,3 +0,0 @@
package model
type SetErr func(err error)

@@ -0,0 +1,26 @@
//go:build swag
package ext
import (
_ "demo-server/docs"
"github.com/gin-gonic/gin"
swaggerFiles "github.com/swaggo/files"
ginSwagger "github.com/swaggo/gin-swagger" // required by WrapHandler below
)
func init() {
initEngQueue = append(initEngQueue, initDOC)
}
func initDOC(r *gin.Engine) {
const docPrefix = "/swagger"
// swagger
r.GET(docPrefix+"/*any", ginSwagger.WrapHandler(
swaggerFiles.Handler,
//ginSwagger.URL("/swagger/doc.json"),
))
r.GET(docPrefix, func(c *gin.Context) {
c.Redirect(301, docPrefix+"/index.html")
})
}

@@ -0,0 +1,101 @@
//go:build web
package ext
import (
"crypto/md5"
"demo-server/pkg/str"
"embed"
"fmt"
"github.com/gin-gonic/gin"
"github.com/vearutop/statigz"
"go.uber.org/zap"
"io"
"io/fs"
"net/http"
"strings"
)
/*
The static web embedding below is adapted from the MIT-licensed open-source project
"github.com/v2rayA/v2rayA/server/router/index.go"
*/
func init() {
initEngQueue = append(initEngQueue, initGUI)
}
// webRootFS embeds the static web assets
//
//go:embed web
var webRootFS embed.FS
func initGUI(r *gin.Engine) {
webFS, err := fs.Sub(webRootFS, "web")
if err != nil {
zap.S().Panic("init gui err, cause: ", err)
}
guiStaticResource(r, webFS)
guiIndexHTML(r, webFS)
}
// html serves the cached HTML page with ETag revalidation
func html(html []byte) func(ctx *gin.Context) {
etag := fmt.Sprintf("W/%x", md5.Sum(html))
h := str.B2S(html)
return func(ctx *gin.Context) {
if ctx.IsAborted() {
return
}
ctx.Header("Content-Type", "text/html; charset=utf-8")
ctx.Header("Cache-Control", "public, must-revalidate")
ctx.Header("ETag", etag)
if match := ctx.GetHeader("If-None-Match"); match != "" {
if strings.Contains(match, etag) {
ctx.Status(http.StatusNotModified)
return
}
}
ctx.String(http.StatusOK, h)
}
}
func guiStaticResource(r *gin.Engine, sfs fs.FS) {
const (
assetsPrefix = "/assets"
staticPrefix = "/static"
stripPrefix = ""
)
staticResource := http.StripPrefix(stripPrefix, statigz.FileServer(sfs.(fs.ReadDirFS)))
r.GET(assetsPrefix+"/*w", func(c *gin.Context) {
staticResource.ServeHTTP(c.Writer, c.Request)
})
r.GET(staticPrefix+"/*w", func(c *gin.Context) {
staticResource.ServeHTTP(c.Writer, c.Request)
})
r.GET("/favicon.ico", func(c *gin.Context) {
staticResource.ServeHTTP(c.Writer, c.Request)
})
}
func guiIndexHTML(r *gin.Engine, sfs fs.FS) {
var indexHTML []byte
{
const indexFileName = "index.html"
index, err := sfs.Open(indexFileName)
if err != nil {
zap.S().Panicf("open [%s] err! cause: %e", indexFileName, err)
}
indexHTML, err = io.ReadAll(index)
if err != nil {
zap.S().Panicf("read [%s] err! cause: %e", indexFileName, err)
}
}
r.GET("/", html(indexHTML))
r.NoRoute(html(indexHTML))
}

@@ -0,0 +1,30 @@
package ext
import (
"demo-server/internal/config"
"demo-server/internal/middleware"
"github.com/gin-contrib/pprof"
"github.com/gin-gonic/gin"
"runtime"
)
/*
Some features need to be enabled at compile time via go build tags,
so initFunc is defined to keep the wiring uniform:
when its build tag is satisfied, a file appends its initializer to initEngQueue.
*/
type initFunc func(e *gin.Engine)
var initEngQueue []initFunc
func Init(e *gin.Engine, cfg *config.Configuration) {
if cfg.Svr.PprofOn {
runtime.SetMutexProfileFraction(1)
runtime.SetBlockProfileRate(1)
pprof.Register(e)
}
e.Use(middleware.Cors())
initEng(e)
initMetric(e)
}
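The initFunc/initEngQueue pattern lets optional features register themselves only when their build tag is compiled in, as doc.go and web.go above do. A hypothetical extra feature file could follow the same shape (the debugroutes tag and names are illustrative only):

//go:build debugroutes

package ext

import "github.com/gin-gonic/gin"

func init() {
    // register with the queue consumed by initEng
    initEngQueue = append(initEngQueue, initDebugRoutes)
}

func initDebugRoutes(r *gin.Engine) {
    r.GET("/debug/ping", func(c *gin.Context) { c.String(200, "pong") })
}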

@@ -0,0 +1,11 @@
package ext
import (
"github.com/gin-gonic/gin"
)
func initEng(s *gin.Engine) {
for _, initF := range initEngQueue {
initF(s)
}
}

@@ -0,0 +1,10 @@
package ext
import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
func initMetric(e *gin.Engine) {
e.GET("/metrics", gin.WrapH(promhttp.Handler()))
}

@@ -1,11 +1,11 @@
 package server
 import (
+"demo-server/internal/model"
 "github.com/gin-contrib/pprof"
 gzap "github.com/gin-contrib/zap"
 "github.com/gin-gonic/gin"
 "go.uber.org/zap"
-"packet-ui/internal/model"
 "runtime"
 "time"
 )

@@ -2,12 +2,12 @@ package server
 import (
 "context"
+"demo-server/internal/model"
+"demo-server/internal/model/server"
 "github.com/gin-gonic/gin"
 "go.uber.org/zap"
 "net"
 "net/http"
-"packet-ui/internal/model"
-"packet-ui/internal/model/server"
 "time"
 )

@@ -2,11 +2,11 @@ package server
 import (
 "crypto/md5"
+"demo-server/pkg"
 "embed"
 "fmt"
 "github.com/gin-gonic/gin"
 "net/http"
-"packet-ui/pkg"
 "strings"
 )

@@ -1,6 +1,6 @@
 package main
-import "packet-ui/cmd"
+import "demo-server/cmd"
 func main() {
 cmd.Execute()

@@ -1,19 +0,0 @@
package pkg
type signal struct{}
var def signal
type SignalChan chan signal
func NewSingleChan() SignalChan {
return make(chan signal)
}
func NewBufferSingleChan(size int) SignalChan {
return make(chan signal, size)
}
func (c SignalChan) Send() {
c <- def
}

@@ -1,30 +0,0 @@
package pkg
import (
"sync"
"testing"
)
func TestChan(t *testing.T) {
t.Run("close", func(t *testing.T) {
ch := NewSingleChan()
wg := sync.WaitGroup{}
wg.Add(2)
go func() {
ch.Send()
//ch.Send()
close(ch)
tmp, ok := <-ch
t.Log("after close", tmp, ok)
wg.Done()
}()
go func() {
tmp, ok := <-ch
t.Log(tmp, ok)
wg.Done()
}()
wg.Wait()
})
}

pkg/channel/any.go (new file)
@@ -0,0 +1,15 @@
package channel
type Any[T any] chan T
func NewAny[T any]() Any[T] {
return make(Any[T])
}
func NewBufferAny[T any](i int) Any[T] {
return make(Any[T], i)
}
func (ch Any[T]) Send(v T) {
ch <- v
}
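A short usage sketch for the generic wrapper (hypothetical, not part of this commit):

package main

import (
    "fmt"

    "demo-server/pkg/channel"
)

func main() {
    results := channel.NewBufferAny[int](2)
    go func() {
        results.Send(1)
        results.Send(2)
        close(results) // close so the range below terminates
    }()
    for v := range results {
        fmt.Println(v)
    }
}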

pkg/channel/chan_test.go (new file)
@@ -0,0 +1,116 @@
package channel
import (
"sync"
"testing"
"time"
)
func TestChan(t *testing.T) {
t.Run("close", func(t *testing.T) {
ch := NewSingle()
wg := sync.WaitGroup{}
wg.Add(2)
go func() {
ch.Send()
//ch.Send()
close(ch)
tmp, ok := <-ch
t.Log("after close", tmp, ok)
wg.Done()
}()
go func() {
time.Sleep(time.Second)
tmp, ok := <-ch
t.Log(tmp, ok)
wg.Done()
}()
wg.Wait()
})
}
func TestChClose(t *testing.T) {
const (
buffer = 10
)
t.Run("send-finish-read-after", func(t *testing.T) {
t.Run("for", func(t *testing.T) {
ch := NewBufferSingle(buffer)
wg := new(sync.WaitGroup)
wg.Add(2)
go func() {
for i := 0; i < buffer; i++ {
ch.Send()
}
close(ch)
wg.Done()
t.Log("send fin, close. ")
}()
go func() {
time.Sleep(time.Second)
for i := 0; i < buffer+1; i++ {
s, ok := <-ch
t.Log(s, ok)
}
wg.Done()
}()
wg.Wait()
})
t.Run("for-range", func(t *testing.T) {
ch := NewBufferSingle(buffer)
wg := new(sync.WaitGroup)
wg.Add(2)
go func() {
for i := 0; i < buffer; i++ {
ch.Send()
}
close(ch)
t.Log("send fin, close. ")
wg.Done()
}()
go func() {
time.Sleep(time.Second)
for s := range ch {
t.Log(s)
}
wg.Done()
}()
wg.Wait()
})
})
}
func TestIntChan(t *testing.T) {
wg := sync.WaitGroup{}
wg.Add(2)
buf := 5
ch := NewBufferInt(buf)
go func() {
defer wg.Done()
for cnt := buf; cnt > 0; cnt-- {
ch.Send(1)
}
// without close, the range read below would block forever
close(ch)
}()
go func() {
defer wg.Done()
<-time.After(time.Second)
t.Logf("channel len:%d cap: %d\n", len(ch), cap(ch))
for i := range ch {
_ = i
}
}()
wg.Wait()
t.Logf("channel len:%d cap: %d\n", len(ch), cap(ch))
}

pkg/channel/int.go (new file)
@@ -0,0 +1,15 @@
package channel
type Int chan int
func NewInt() Int {
return make(Int)
}
func NewBufferInt(size int) Int {
return make(Int, size)
}
func (c Int) Send(i int) {
c <- i
}

pkg/channel/signal.go (new file)
@@ -0,0 +1,23 @@
package channel
type signal struct{}
var def signal
type Signal chan signal
func NewSingle() Signal {
return make(Signal)
}
func NewBufferSingle(size int) Signal {
return make(Signal, size)
}
func (c Signal) Send() {
c <- def
}
func (c Signal) Read() {
<-c
}
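Signal reads naturally as a completion notification; a minimal fragment (hypothetical usage):

ready := channel.NewSingle()
go func() {
    // ... perform setup ...
    ready.Send()
}()
ready.Read() // blocks until the goroutine above signals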

@@ -0,0 +1 @@
package channel

pkg/confg/config.go (new file)
@@ -0,0 +1,88 @@
package confg
import (
"bytes"
"github.com/fsnotify/fsnotify"
"github.com/goccy/go-json"
"github.com/spf13/viper"
"go.uber.org/zap"
"path/filepath"
"sync"
)
type RWLock interface {
sync.Locker
RLock()
RUnlock()
}
type Config interface {
RWLock
}
func WriteConf(path string, conf any) (err error) {
var data []byte
if data, err = json.Marshal(conf); err != nil {
return
}
// newViper already sets the config type from the file extension (without the leading dot)
v := newViper(path)
if err = v.ReadConfig(bytes.NewBuffer(data)); err != nil {
return
}
err = v.WriteConfig()
return
}
func ReadConf(path string, conf any) error {
return updateConfig(newViper(path), conf)
}
func ReadConfAndWatch(path string, conf Config, up func()) (err error) {
v := newViper(path)
if err = updateConfig(v, conf); err != nil {
return
}
onUpdate := func(in fsnotify.Event) {
zap.L().Info("watch file", zap.Any("event", in))
if in.Op&fsnotify.Write == fsnotify.Write { // the operation is carried in in.Op; in.Name holds the file path
conf.Lock()
defer conf.Unlock()
err = updateConfig(v, conf)
}
if err != nil {
zap.L().Error("auto update config err", zap.String("path", path), zap.Error(err))
return
}
if up != nil {
up()
}
}
// watch config WRITE event reread config
v.OnConfigChange(onUpdate)
v.WatchConfig()
return
}
func newViper(path string) (v *viper.Viper) {
v = viper.New()
v.SetConfigFile(path)
ext := filepath.Ext(path)
v.SetConfigType(ext[1:])
return
}
func updateConfig(v *viper.Viper, conf any) (err error) {
if err = v.ReadInConfig(); err != nil {
return
}
if err = v.Unmarshal(conf); err != nil {
return
}
return
}
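A usage sketch for ReadConfAndWatch (the config type, path and logging are assumptions; any struct embedding sync.RWMutex satisfies the Config interface):

package main

import (
    "sync"

    "demo-server/pkg/confg"
    "go.uber.org/zap"
)

// AppConf embeds sync.RWMutex, so *AppConf satisfies confg.Config.
type AppConf struct {
    sync.RWMutex
    Addr string `json:"addr" mapstructure:"addr" yaml:"addr"`
}

func main() {
    cfg := new(AppConf)
    err := confg.ReadConfAndWatch("./conf/app.yaml", cfg, func() {
        zap.L().Info("config reloaded", zap.String("addr", cfg.Addr))
    })
    if err != nil {
        panic(err)
    }
}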

pkg/confg/config_test.go (new file)
@@ -0,0 +1,40 @@
package confg
import (
"btdp-agent-admin/pkg/str"
"encoding/json"
"sync"
"testing"
)
func TestUn(t *testing.T) {
type Config struct {
Type string `json:"type" mapstructure:"type" yaml:"type"`
VXLanSvr string `json:"vxlan_svr" mapstructure:"vxlan_svr" yaml:"vxlan_svr"`
Loop int64 `json:"loop" mapstructure:"loop" yaml:"loop"`
Interval int `json:"interval" mapstructure:"interval" yaml:"interval"`
DataPath []string `json:"data_path" mapstructure:"data_path" yaml:"data_path"`
MaxTime int64 `json:"max_time" mapstructure:"max_time" yaml:"max_time"`
}
type Refer struct {
sync.RWMutex
Config `json:"runner" mapstructure:"runner" yaml:"runner"`
}
t.Run("write", func(t *testing.T) {
r := new(Refer)
r.DataPath = append(r.DataPath, "testing")
t.Log(WriteConf("./testing/x.yaml", r))
})
t.Run("read", func(t *testing.T) {
r := new(Refer)
if err := ReadConf("../conf/runner.yaml", r); err != nil {
t.Fatal(err)
}
res, err := json.Marshal(r)
t.Log(str.B2S(res), err)
})
}

@@ -1,15 +0,0 @@
package db
import (
"gorm.io/gorm"
)
const (
TypeSQLITE = "sqlite"
TypeMYSQL = "mysql"
)
type Client struct {
Type string
*gorm.DB
}

@@ -1,5 +0,0 @@
package db
const (
ErrOpenFormat = "type [%s] open err, cause: %w"
)

@@ -1,4 +1,4 @@
-package pkg
+package errs
 import (
 "fmt"

@@ -1,4 +1,4 @@
-package pkg
+package errs
 import (
 "testing"

@@ -1,4 +1,4 @@
-package pkg
+package file
 import (
 "bufio"
@@ -31,3 +31,11 @@ func ScanFile(path string, handle func(data string) (err error)) (err error) {
 return nil
 }
+
+func Size(path string) (int64, error) {
+info, err := os.Stat(path)
+if err != nil {
+return 0, err
+}
+return info.Size(), err
+}

@@ -1,14 +1,15 @@
-package pkg
+package file
 import (
 "crypto/md5"
 "fmt"
 "os"
+"path/filepath"
 "testing"
 )
 func TestFileMD5(t *testing.T) {
-t.Log(FileMD5("test.txt"))
+t.Log(MD5("test.txt"))
 readFile, err := os.ReadFile("test.txt")
 if err != nil {
@@ -43,3 +44,21 @@ func TestS(t *testing.T) {
 }
 })
 }
+
+func TestScan(t *testing.T) {
+var (
+err error
+head string
+)
+err = DirScan("../data", func(dir string, info os.FileInfo) {
+if info.IsDir() {
+head = "-"
+} else {
+head = "|"
+}
+t.Log(head, filepath.Join(dir, info.Name()), "size(byte):", info.Size())
+})
+if err != nil {
+t.Fatal(err)
+}
+}

pkg/file/file_util.go (new file)
@@ -0,0 +1,139 @@
package file
import (
"demo-server/pkg/md5"
"errors"
"go.uber.org/zap"
"io"
"mime/multipart"
"os"
"path/filepath"
"strings"
)
func Save(r io.Reader, dst string) (err error) {
var (
fp string
out io.WriteCloser
)
fp = filepath.Dir(dst)
if err = DirCheck(fp); err != nil {
return
}
out, err = os.Create(dst)
if err != nil {
return err
}
defer out.Close()
_, err = io.Copy(out, r)
return
}
// MD5 computes the md5 checksum of a file
func MD5(file string) (string, error) {
f, err := os.Open(file)
if err != nil {
return "", err
}
info, err := f.Stat()
if err != nil {
return "", err
}
size := info.Size()
return md5.Sum(f, size, BlockSize, ChunkSize)
}
func MultipartMD5(fh *multipart.FileHeader) (string, error) {
f, err := fh.Open()
if err != nil {
return "", err
}
size := fh.Size
return md5.Sum(f, size, BlockSize, ChunkSize)
}
const (
BlockSize = 1 << 10
ChunkSize = BlockSize << 2
)
var (
ErrorIsNotDir = errors.New("path is not dir. ")
)
func DirCheck(dir string) (err error) {
var fi os.FileInfo
if fi, err = os.Stat(dir); err != nil {
if !os.IsExist(err) || os.IsNotExist(err) {
return DirMk(dir)
}
}
if !fi.IsDir() {
err = ErrorIsNotDir
}
return
}
func DirMk(dir string) error {
return os.MkdirAll(dir, 0755)
}
const MaxScanDeep = 20
var maxDeepError = errors.New("scanMaxDeep")
func IsScanMaxDeep(err error) bool {
return errors.Is(err, maxDeepError)
}
// DirScan walks a directory tree; entries that fail to read are skipped
func DirScan(dir string, h func(dir string, info os.FileInfo)) error {
return dirScan(dir, h, 0)
}
func dirScanIgnore(name string) bool {
return strings.HasPrefix(name, ".") ||
strings.Contains(strings.ToLower(name), "readme")
}
func dirScan(dir string, h func(dir string, info os.FileInfo), deep int) error {
if deep > MaxScanDeep {
return maxDeepError
}
dirs, err := os.ReadDir(dir)
if err != nil {
return err
}
var (
info os.FileInfo
nextDir string
)
for _, entry := range dirs {
info, err = entry.Info()
if err != nil {
zap.L().Warn("scan err", zap.String("name", entry.Name()), zap.Error(err))
continue
}
if dirScanIgnore(info.Name()) {
continue
}
h(dir, info)
if entry.IsDir() {
nextDir = filepath.Join(dir, info.Name())
if err = dirScan(nextDir, h, deep+1); err != nil {
zap.L().Warn("scan err", zap.String("dirname", nextDir), zap.Error(err))
}
}
}
return err
}

@@ -1,84 +0,0 @@
package pkg
import (
"errors"
"io"
"mime/multipart"
"os"
"packet-ui/pkg/md5"
"path/filepath"
)
func FileSave(r io.Reader, dst string) (err error) {
var (
fp string
out io.WriteCloser
)
fp, _ = filepath.Split(dst)
err = DirCheck(fp)
out, err = os.Create(dst)
if err != nil {
return err
}
defer out.Close()
_, err = io.Copy(out, r)
return
}
// FileMD5 computes the md5 checksum of a file
func FileMD5(file string) (string, error) {
f, err := os.Open(file)
if err != nil {
return "", err
}
info, err := f.Stat()
if err != nil {
return "", err
}
size := info.Size()
return md5.Sum(f, size, BlockSize, ChunkSize)
}
func MultipartMD5(fh *multipart.FileHeader) (string, error) {
f, err := fh.Open()
if err != nil {
return "", err
}
size := fh.Size
return md5.Sum(f, size, BlockSize, ChunkSize)
}
const (
BlockSize = 1 << 10
ChunkSize = BlockSize << 2
)
var (
ErrorIsNotDir = errors.New("path is not dir. ")
)
func DirCheck(dir string) (err error) {
var fi os.FileInfo
if fi, err = os.Stat(dir); err != nil {
if !os.IsExist(err) || os.IsNotExist(err) {
return DirMk(dir)
}
}
if !fi.IsDir() {
err = ErrorIsNotDir
}
return
}
func DirMk(dir string) error {
return os.MkdirAll(dir, 0755)
}

pkg/log/gin.go (new file)
@@ -0,0 +1,9 @@
package log
import "go.uber.org/zap"
type GinDebugFunc = func(httpMethod, absolutePath, handlerName string, _ int)
func GinRouter(httpMethod, absolutePath, handlerName string, handlersNum int) {
zap.S().Infof("%v\t%v\t", httpMethod, absolutePath)
}
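GinRouter matches the signature of gin's DebugPrintRouteFunc hook, so route registration can be reported through zap; a wiring sketch (assumed usage, and it presumes the zap global logger has been initialized, e.g. by initLogger):

package main

import (
    "net/http"

    "demo-server/pkg/log"
    "github.com/gin-gonic/gin"
)

func main() {
    gin.DebugPrintRouteFunc = log.GinRouter // registrations below are logged via zap
    r := gin.New()
    r.GET("/ping", func(c *gin.Context) { c.String(http.StatusOK, "pong") })
    _ = r.Run(":8080")
}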

@@ -7,7 +7,7 @@ import (
 "moul.io/zapgorm2"
 )
-func GormZapLogger() logger.Interface {
+func GormZapLogger(level string) logger.Interface {
 l := zapgorm2.New(zap.L())
 l.SetAsDefault() // optional: configure gorm to use this zapgorm.Logger for callbacks
 l.LogLevel = gormLogger.Info

pkg/log/kafka.go (new file)
@@ -0,0 +1,10 @@
package log
import (
"github.com/segmentio/kafka-go"
"go.uber.org/zap"
)
var KafkaInfo = kafka.LoggerFunc(zap.S().Infof)
var KafkaErr = kafka.LoggerFunc(zap.S().Errorf)

pkg/log/test.go (new file)
@@ -0,0 +1,39 @@
package log
import (
"context"
"gorm.io/gorm/logger"
"testing"
"time"
)
type goTest struct {
t *testing.T
}
func (g *goTest) LogMode(_ logger.LogLevel) logger.Interface {
return g
}
func (g *goTest) Info(_ context.Context, s string, i ...interface{}) {
g.t.Log("Info: ", s, i)
}
func (g *goTest) Warn(_ context.Context, s string, i ...interface{}) {
g.t.Log("Warn: ", s, i)
}
func (g *goTest) Error(_ context.Context, s string, i ...interface{}) {
g.t.Log("Error: ", s, i)
}
func (g *goTest) Trace(_ context.Context, begin time.Time, fc func() (sql string, rowsAffected int64), err error) {
sql, affected := fc()
g.t.Log(time.Now().Sub(begin), "\tSQL:", sql, "\tAffected:", affected, "\tErr:", err)
}
func GoTest(t *testing.T) logger.Interface {
return &goTest{t: t}
}

pkg/math.go (new file)
@@ -0,0 +1,35 @@
package pkg
import "math"
func MinInt(a ...int) int {
min := math.MaxInt
for _, i := range a {
if min > i {
min = i
}
}
return min
}
const (
ByteBit = 1 << 3 << iota
_
I32Bit
I64Bit
)
const (
I32Byte = I32Bit / ByteBit
I64Byte = I64Bit / ByteBit
)
func Int64Byte(i64 int64) [I64Byte]byte {
res := [I64Byte]byte{}
for i := 0; i < I64Byte; i++ {
res[I64Byte-1-i] = byte(i64 & 0xff)
i64 >>= ByteBit
}
return res
}
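A quick check of the two helpers above (hypothetical fragment):

fmt.Println(pkg.MinInt(7, 3, 9)) // 3
b := pkg.Int64Byte(0x0102030405060708)
fmt.Printf("% x\n", b) // 01 02 03 04 05 06 07 08 (big-endian layout)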

@@ -41,3 +41,43 @@ func partitionSum(r io.Reader, size, blockSize int64) (string, error) {
 fileMD5 := fmt.Sprintf("%x", hash.Sum(nil))
 return fileMD5, nil
 }
func SumWithHandler(r io.Reader, size, blockSize, chunkSize int64, h func(buf []byte) error) (string, error) {
if size > chunkSize {
// hash block by block when the payload exceeds the chunk size
return partitionSumWithHandler(r, size, blockSize, h)
}
data, err := io.ReadAll(r)
if err != nil {
return "", err
}
if err = h(data); err != nil {
return "", err
}
fileMD5 := fmt.Sprintf("%x", md5.Sum(data))
return fileMD5, nil
}
func partitionSumWithHandler(r io.Reader, size, blockSize int64, h func(buf []byte) error) (string, error) {
buf := make([]byte, blockSize)
hash := md5.New()
var err error
for size > blockSize {
size -= blockSize
if _, err = r.Read(buf); err != nil {
return "", err
}
hash.Write(buf)
if err = h(buf); err != nil {
return "", err
}
}
buf = buf[:size]
if _, err = r.Read(buf); err != nil {
return "", err
}
hash.Write(buf)
if err = h(buf); err != nil {
return "", err
}
fileMD5 := fmt.Sprintf("%x", hash.Sum(nil))
return fileMD5, nil
}
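SumWithHandler hashes a stream while handing every block to a callback, e.g. to persist the data while its checksum is computed. A hedged sketch (the file name is made up, and it assumes these functions live in pkg/md5 and reuse the BlockSize/ChunkSize constants from pkg/file, as the imports elsewhere in this commit suggest):

package main

import (
    "bytes"
    "fmt"
    "os"

    "demo-server/pkg/file"
    "demo-server/pkg/md5"
)

func main() {
    f, err := os.Open("data.bin") // hypothetical input file
    if err != nil {
        panic(err)
    }
    defer f.Close()
    info, _ := f.Stat()

    var out bytes.Buffer
    sum, err := md5.SumWithHandler(f, info.Size(), file.BlockSize, file.ChunkSize, func(buf []byte) error {
        _, werr := out.Write(buf) // forward each block while it is being hashed
        return werr
    })
    fmt.Println(sum, out.Len(), err)
}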

@@ -1,27 +0,0 @@
package pkg
import (
"fmt"
"math/rand"
"time"
)
func init() {
rand.Seed(time.Now().UnixNano())
}
func RandMac() string {
return fmt.Sprintf("%02x:%02x:%02x:%02x:%02x:%02x", rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256))
}
func RandIp() string {
return fmt.Sprintf("%d.%d.%d.%d", rand.Intn(256), rand.Intn(256), rand.Intn(256), rand.Intn(256))
}
func RandPort() uint16 {
return uint16(rand.Intn(35536) + 30000)
}
func RandVni() uint32 {
return uint32(rand.Intn(1 << 24))
}

@@ -1 +0,0 @@
package pkg

@@ -1,4 +1,4 @@
-package pkg
+package set
 import (
 "strings"

@@ -1,4 +1,4 @@
-package pkg
+package set
 import (
 "testing"

@@ -1,37 +0,0 @@
package sqlite
//
//import (
// "fmt"
// "gorm.io/driver/sqlite"
// "gorm.io/gorm"
// "packet-ui/pkg/db"
// "packet-ui/pkg/log"
//)
//
//const (
// DBType = "sqlite"
// // DSN as default dsn
// DSN = "data.db"
//)
//
//func Open(dsn string) (*db.Client, error) {
// // global mode
// if len(dsn) == 0 {
// dsn = DSN
// }
//
// dbc, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{
// PrepareStmt: true,
// Logger: log.GormZapLogger(),
// })
//
// if err != nil {
// err = fmt.Errorf(db.ErrOpenFormat, DBType, err)
// }
//
// return &db.Client{
// Type: DBType,
// DB: dbc,
// }, err
//}

@@ -1,14 +0,0 @@
package sqlite
//import (
// "testing"
//)
//
//func TestOpenDB(t *testing.T) {
// _, err := Open("")
//
// if err != nil {
// t.Fatal(err)
// }
//
//}

@@ -1,4 +1,4 @@
-package pkg
+package str
 import (
 "testing"

pkg/str/str_conv_test.go (new file)
@@ -0,0 +1,17 @@
package str
import "testing"
func TestConv(t *testing.T) {
tmp := "测试字符"
bs := []byte("测试字符")
t.Run("byte -> string", func(t *testing.T) {
t.Log(B2S(bs))
})
t.Run("string -> byte", func(t *testing.T) {
t.Log(S2B(tmp))
})
t.Run("string -> byte -> string", func(t *testing.T) {
t.Log(B2S(S2B(tmp)))
})
}

@@ -1,4 +1,4 @@
-package pkg
+package str
 import (
 "fmt"

@@ -1,4 +1,4 @@
-package pkg
+package str
 import (
 "reflect"

@@ -1,4 +1,4 @@
-package pkg
+package str
 import (
 "github.com/google/uuid"
@@ -7,3 +7,7 @@ import (
 func UUID() string {
 return uuid.New().String()
 }
+
+func UUIDN(n int) string {
+return uuid.New().String()[:n]
+}