package ledis

import (
	"fmt"
	"github.com/siddontang/copier"
	"github.com/siddontang/ledisdb/store"
	"path"
)
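
// Config is the ledis configuration: the data directory, the backend
// store (DB) options and the binlog options. The json tags name the keys
// of the JSON config file, so a Config can be filled with encoding/json;
// a minimal sketch (file name illustrative, error handling omitted):
//
//	var cfg ledis.Config
//	data, _ := ioutil.ReadFile("ledis.json")
//	json.Unmarshal(data, &cfg)
//	dbCfg := cfg.NewDBConfig()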
type Config struct {
	DataDir string `json:"data_dir"`

	DB struct {
		Name            string `json:"name"`
		Compression     bool   `json:"compression"`
		BlockSize       int    `json:"block_size"`
		WriteBufferSize int    `json:"write_buffer_size"`
		CacheSize       int    `json:"cache_size"`
		MaxOpenFiles    int    `json:"max_open_files"`
		MapSize         int    `json:"map_size"`
	} `json:"db"`

	BinLog struct {
		Use         bool `json:"use"`
		MaxFileSize int  `json:"max_file_size"`
		MaxFileNum  int  `json:"max_file_num"`
	} `json:"binlog"`
}
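
// NewDBConfig builds a *store.Config from the DB section: it falls back to
// store.DefaultStoreName when no store name is set, copies the DB options
// over and points the store path at <data_dir>/<name>_data.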
func (cfg *Config) NewDBConfig() *store.Config {
	if len(cfg.DB.Name) == 0 {
		fmt.Printf("no store set, use default %s\n", store.DefaultStoreName)
		cfg.DB.Name = store.DefaultStoreName
	}

	dbCfg := new(store.Config)
	copier.Copy(dbCfg, &cfg.DB)

	dbPath := path.Join(cfg.DataDir, fmt.Sprintf("%s_data", cfg.DB.Name))

	dbCfg.Path = dbPath

	return dbCfg
}
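
// NewBinLogConfig builds a *BinLogConfig from the BinLog section and points
// its path at <data_dir>/bin_log.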
func (cfg *Config) NewBinLogConfig() *BinLogConfig {
	binLogPath := path.Join(cfg.DataDir, "bin_log")

	c := new(BinLogConfig)
	copier.Copy(c, &cfg.BinLog)

	c.Path = binLogPath
	return c
}