Background:
As the business grew, the amount of data in the table kept increasing, while the business side only ever queries the most recent week of data. After confirming with the developers, the log table only needs to keep 1,000,000 rows, with older rows periodically archived into a history table. I happened to be picking up Go at the time, so I used this small requirement as practice.
Preparation:
- go get github.com/go-sql-driver/mysql
- golang 1.18
Directory structure:
job_log
|-- dbutil
|   |-- sqlutil.go
|-- go.mod
|-- go.sum
|-- logs
|   |-- alert.log
|-- logutil
|   |-- logutil.go
|-- testmain.go
// generate the go.mod and go.sum files
go mod init joblog
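After go mod init and the go get from the preparation step, the generated go.mod should look roughly like this (a sketch only; the driver version depends on when go get is run, v1.6.0 is just an example):

module joblog

go 1.18

require github.com/go-sql-driver/mysql v1.6.0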
Code:
testmain.go
package main

import (
	"joblog/dbutil"
	//"joblog/logutil"
	//"fmt"
)

// Periodically write data from the job_log table into the history table, then clean it out of the log table:
// record the max id in the history table, take the rows from the log table above it, write them into the history table,
// then delete those rows from the log table.
const (
	RESERVEDATA int64 = 1000000 // keep the most recent 1,000,000 rows in the log table
)

// get the max ids and fill them into the struct
func getMaxvalue(si *dbutil.Sqlinfo) error {
	err := si.GetMaxOrMinId("source", "id", "max")
	if err != nil {
		//logutil.Logger.Println(err.Error())
		return err
	}
	// get max id from history table
	err = si.GetMaxOrMinId("target", "id", "max")
	if err != nil {
		//logutil.Logger.Println(err.Error())
		return err
	}
	return nil
}

// migrate log table data to history,
// leaving 1,000,000 rows in the log table
func migrateData(si *dbutil.Sqlinfo) {
	if distance := si.SourceMaxid - si.TargetMaxid; distance > RESERVEDATA {
		si.MigrateData(distance, RESERVEDATA)
	}
}

func main() {
	// the connection string differs between the production and test environments
	err := dbutil.InitDB("test")
	if err != nil {
		return
	}
	defer dbutil.CloseDB()
	var si = dbutil.Sqlinfo{"xxx_log_his", "xxx_log", 0, 0}
	err = getMaxvalue(&si)
	if err != nil {
		return
	}
	migrateData(&si)
	//logutil.Logger.Println(&si)
}
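To make the id arithmetic concrete, here is a small stand-alone sketch with made-up numbers (4,000,000 and 5,500,000 are purely illustrative): it repeats the calculation done by migrateData/MigrateData and prints the cut-off id below which rows are moved.

package main

import "fmt"

// Illustrative numbers only: this mirrors the arithmetic used by
// migrateData and MigrateData to find the cut-off id.
func main() {
	const reservedata int64 = 1000000 // same role as RESERVEDATA
	var targetMaxid int64 = 4000000   // assumed max id in the history table
	var sourceMaxid int64 = 5500000   // assumed max id in the log table

	distance := sourceMaxid - targetMaxid // 1,500,000, which exceeds reservedata
	if distance > reservedata {
		insertmaxid := targetMaxid + distance - reservedata // 4,500,000
		fmt.Println("rows with id <", insertmaxid, "are copied to history and deleted from the log table")
	}
}

After such a run the log table keeps roughly the newest 1,000,000 rows (ids 4,500,000 to 5,500,000 in this example).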
dbutil/sqlutil.go
package dbutil

import (
	_ "github.com/go-sql-driver/mysql"
	"joblog/logutil"
	"database/sql"
)

var db *sql.DB

type Sqlinfo struct {
	TargetTableName string // history table name
	SourceTableName string // log table name
	TargetMaxid     int64  // max id in the target (history) table
	SourceMaxid     int64  // max id in the source (log) table
	//Is_target bool // whether the target table exists
}

func InitDB(env string) error {
	var connstr string
	// the connection strings need to be replaced with real ones
	if env == "test" {
		connstr = "test_user:test_pass@tcp(test_ip:port)/dbname"
	} else {
		connstr = "user:pass@tcp(ip:port)/dbname"
	}
	var err error
	db, err = sql.Open("mysql", connstr)
	if err != nil {
		logutil.Logger.Println(err)
		return err
	}
	err = db.Ping()
	if err != nil {
		logutil.Logger.Println(err)
		return err
	}
	db.SetMaxIdleConns(5)
	db.SetMaxOpenConns(15)
	return nil
}

func CloseDB() {
	if db != nil {
		db.Close()
	}
}

// get the max (or min) id of the source or target table
func (si *Sqlinfo) GetMaxOrMinId(stname, column string, maxormin string) error {
	var dbsql string
	if stname == "target" {
		dbsql = "select " + maxormin + "(" + column + ") rowcnt from " + si.TargetTableName
	} else {
		dbsql = "select " + maxormin + "(" + column + ") rowcnt from " + si.SourceTableName
	}
	var rowcnt int64 = 0
	//logutil.Logger.Println(dbsql)
	rows, err := db.Query(dbsql)
	if err != nil {
		logutil.Logger.Println(err)
		return err
	}
	defer rows.Close()
	for rows.Next() {
		err = rows.Scan(&rowcnt)
		if err != nil {
			// max(id) is NULL on an empty table, which makes Scan fail; treat it as 0
			logutil.Logger.Println(err)
			//return err
			rowcnt = 0
		}
	}
	if stname == "target" {
		si.TargetMaxid = rowcnt
	} else {
		si.SourceMaxid = rowcnt
	}
	return nil
}

// insert the rows below the cut-off id into the history table,
// then delete them from the log table, inside one transaction
func (si *Sqlinfo) MigrateData(distance, reservedata int64) error {
	// the highest id (exclusive) to move into the history table
	insertmaxid := si.TargetMaxid + distance - reservedata
	insertsql := "insert into " + si.TargetTableName + " select * from " + si.SourceTableName + " where id < ? "
	deletesql := "delete from " + si.SourceTableName + " where id < ? "
	tx, err := db.Begin()
	if err != nil {
		logutil.Logger.Println(err)
		return err
	}
	result, err := tx.Exec(insertsql, insertmaxid)
	logutil.Logger.Println(insertsql, insertmaxid)
	if err != nil {
		_ = tx.Rollback()
		logutil.Logger.Println(err)
		return err
	}
	result, err = tx.Exec(deletesql, insertmaxid)
	logutil.Logger.Println(deletesql, result)
	if err != nil {
		_ = tx.Rollback()
		logutil.Logger.Println(err)
		return err
	}
	if err = tx.Commit(); err != nil {
		logutil.Logger.Println(err)
		return err
	}
	return nil
}
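One detail worth pointing out: max(id) returns NULL when a table is empty, which is why GetMaxOrMinId falls back to rowcnt = 0 when Scan fails. A minimal alternative sketch (not part of the code above) scans into sql.NullInt64 instead, so an empty table yields 0 without producing a Scan error:

package dbutil

import "database/sql"

// maxID is an alternative sketch, not used by the code above: scanning into
// sql.NullInt64 turns a NULL max(id) (empty table) into 0 instead of a Scan error.
func maxID(db *sql.DB, table, column string) (int64, error) {
	var v sql.NullInt64
	if err := db.QueryRow("select max(" + column + ") from " + table).Scan(&v); err != nil {
		return 0, err
	}
	if !v.Valid { // table is empty
		return 0, nil
	}
	return v.Int64, nil
}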
logutil/logutil.go
package logutil

import (
	"log"
	"os"
)

var Logger *log.Logger

const (
	FILENAME string = "/home/test/joblog/logs/alert.log"
)

func init() {
	logfile, err := os.OpenFile(FILENAME, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
	//defer logfile.Close() // the logger keeps writing to this file, so it must stay open
	if err != nil {
		log.Panic("open log file failed: ", err)
	}
	Logger = log.New(logfile, "tedu_ ", log.Ldate|log.Ltime|log.Lshortfile)
}
Wrap-up:
The code is very simple: connect to the database, look up the relevant ids, open a transaction, insert the rows into the history table, and delete them from the log table. On the very first run, archive the table data manually in stages to avoid inserting too many rows at once and running into trouble. After that, deploy it as a scheduled job with crontab -e.
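A sketch of the deployment, assuming the project lives under /home/test/joblog (the same path the logger uses) and the job runs once a day at 02:00; the path and schedule are just examples:

# build the binary once
cd /home/test/joblog && go build -o joblog .

# crontab -e: run the archiver every day at 02:00
0 2 * * * /home/test/joblog/joblog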