package tests

import (
	"fmt"
	"math/rand"
	"os"
	"os/signal"
	"syscall"
	"testing"
	"time"

	"github.com/gomodule/redigo/redis"
)
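
// ANSI escape codes used to colorize test output in the terminal.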
const (
	clear   = "\x1b[0m"
	bright  = "\x1b[1m"
	dim     = "\x1b[2m"
	black   = "\x1b[30m"
	red     = "\x1b[31m"
	green   = "\x1b[32m"
	yellow  = "\x1b[33m"
	blue    = "\x1b[34m"
	magenta = "\x1b[35m"
	cyan    = "\x1b[36m"
	white   = "\x1b[37m"
)
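
// TestAll runs every subtest suite against a single mock server started by
// mockOpenServer. mockCleanup runs before and after the suite, and again if
// the process receives SIGINT or SIGTERM.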
func TestAll(t *testing.T) {
	mockCleanup(false)
	defer mockCleanup(false)

	ch := make(chan os.Signal, 1)
	signal.Notify(ch, os.Interrupt, syscall.SIGTERM)
	go func() {
		<-ch
		mockCleanup(false)
		os.Exit(1)
	}()

	mc, err := mockOpenServer(false, true)
	if err != nil {
		t.Fatal(err)
	}
	defer mc.Close()

	// mc2, err := mockOpenServer(false, false)
	// if err != nil {
	// 	t.Fatal(err)
	// }
	// defer mc2.Close()
	// mc.alt = mc2
	// mc2.alt = mc

	runSubTest(t, "keys", mc, subTestKeys)
	runSubTest(t, "json", mc, subTestJSON)
	runSubTest(t, "search", mc, subTestSearch)
	runSubTest(t, "testcmd", mc, subTestTestCmd)
	runSubTest(t, "fence", mc, subTestFence)
	runSubTest(t, "scripts", mc, subTestScripts)
	runSubTest(t, "info", mc, subTestInfo)
	runSubTest(t, "client", mc, subTestClient)
	runSubTest(t, "timeouts", mc, subTestTimeout)
	runSubTest(t, "metrics", mc, subTestMetrics)
}
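
// runSubTest runs a group of tests as a named t.Run subtest and prints a
// highlighted banner before the group starts.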
func runSubTest(t *testing.T, name string, mc *mockServer, test func(t *testing.T, mc *mockServer)) {
	t.Run(name, func(t *testing.T) {
		fmt.Printf(bright+"Testing %s\n"+clear, name)
		test(t, mc)
	})
}
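
// runStep runs a single named test step on a fresh connection against a
// flushed database and prints a colored ok/fail line for the step.
//
// Illustrative usage only; the step name, key, and arguments below are
// hypothetical:
//
//	runStep(t, mc, "basic-set", func(mc *mockServer) error {
//		return mc.DoBatch(Do("SET", "fleet", "truck1", "POINT", "33", "-112").OK())
//	})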
func runStep(t *testing.T, mc *mockServer, name string, step func(mc *mockServer) error) {
	t.Helper()
	t.Run(name, func(t *testing.T) {
		t.Helper()
		if err := func() error {
			// reset the current server
			mc.ResetConn()
			defer mc.ResetConn()
			// clear the database so the test is consistent
			if err := mc.DoBatch(
				Do("OUTPUT", "resp").OK(),
				Do("FLUSHDB").OK(),
			); err != nil {
				return err
			}
			if err := step(mc); err != nil {
				return err
			}
			return nil
		}(); err != nil {
			fmt.Fprintf(os.Stderr, "["+red+"fail"+clear+"]: %s\n", name)
			t.Fatal(err)
		}
		fmt.Printf("["+green+"ok"+clear+"]: %s\n", name)
	})
}
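
// BenchmarkAll starts a mock server and runs every benchmark suite against
// it, cleaning up with mockCleanup before and after the run and on SIGINT or
// SIGTERM.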
|
2020-04-08 21:36:37 +03:00
|
|
|
|
|
|
|
func BenchmarkAll(b *testing.B) {
|
|
|
|
mockCleanup(true)
|
|
|
|
defer mockCleanup(true)
|
|
|
|
|
2022-09-23 17:30:03 +03:00
|
|
|
ch := make(chan os.Signal, 1)
|
2020-04-08 21:36:37 +03:00
|
|
|
signal.Notify(ch, os.Interrupt, syscall.SIGTERM)
|
|
|
|
go func() {
|
|
|
|
<-ch
|
|
|
|
mockCleanup(true)
|
|
|
|
os.Exit(1)
|
|
|
|
}()
|
|
|
|
|
2022-09-24 03:34:09 +03:00
|
|
|
mc, err := mockOpenServer(true, true)
|
2020-04-08 21:36:37 +03:00
|
|
|
if err != nil {
|
|
|
|
b.Fatal(err)
|
|
|
|
}
|
|
|
|
defer mc.Close()
|
|
|
|
runSubBenchmark(b, "search", mc, subBenchSearch)
|
|
|
|
}
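
// loadBenchmarkPoints seeds the "mykey" collection with 200,000 random points,
// each carrying a "foo" field, so that benchmark steps operate on a populated
// dataset.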
func loadBenchmarkPoints(b *testing.B, mc *mockServer) (err error) {
	const nPoints = 200000
	rand.Seed(time.Now().UnixNano())

	// add a bunch of points
	for i := 0; i < nPoints; i++ {
		val := fmt.Sprintf("val:%d", i)
		var resp string
		var lat, lon, fval float64
		fval = rand.Float64()
		lat = rand.Float64()*180 - 90
		lon = rand.Float64()*360 - 180
		resp, err = redis.String(mc.conn.Do("SET",
			"mykey", val,
			"FIELD", "foo", fval,
			"POINT", lat, lon))
		if err != nil {
			return
		}
		if resp != "OK" {
			err = fmt.Errorf("expected 'OK', got '%s'", resp)
			return
		}
	}
	return
}
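
// runSubBenchmark runs a group of benchmarks as a named b.Run sub-benchmark.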
func runSubBenchmark(b *testing.B, name string, mc *mockServer, bench func(t *testing.B, mc *mockServer)) {
	b.Run(name, func(b *testing.B) {
		bench(b, mc)
	})
}
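
// runBenchStep runs a single named benchmark step b.N times against a fresh
// connection and a flushed, freshly seeded database. The timer is reset after
// seeding so setup cost is excluded from the measurement.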
func runBenchStep(b *testing.B, mc *mockServer, name string, step func(mc *mockServer) error) {
	b.Helper()
	b.Run(name, func(b *testing.B) {
		b.Helper()
		if err := func() error {
			// reset the current server
			mc.ResetConn()
			defer mc.ResetConn()
			// clear the database so the test is consistent
			if err := mc.DoBatch([][]interface{}{
				{"OUTPUT", "resp"}, {"OK"},
				{"FLUSHDB"}, {"OK"},
			}); err != nil {
				return err
			}
			err := loadBenchmarkPoints(b, mc)
			if err != nil {
				return err
			}
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				if err := step(mc); err != nil {
					return err
				}
			}
			return nil
		}(); err != nil {
			b.Fatal(err)
		}
	})
}