Feature/issue 49 fix test cases #52

Merged
merged 2 commits on Oct 8, 2023
39 changes: 39 additions & 0 deletions lib/column/date.go
@@ -62,6 +62,11 @@ func (dt *Date) ScanRow(dest interface{}, row int) error {
case **time.Time:
*d = new(time.Time)
**d = dt.row(row).Time
case *types.Date:
*d = dt.row(row)
case **types.Date:
*d = new(types.Date)
**d = dt.row(row)
default:
return &ColumnConverterError{
Op: "ScanRow",
@@ -96,6 +101,28 @@ func (dt *Date) Append(v interface{}) (nulls []uint8, err error) {
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
case []types.Date:
in := make([]int16, 0, len(v))
for _, t := range v {
if err := dateOverflow(minDate, maxDate, t.Time, "2006-01-02"); err != nil {
return nil, err
}
in = append(in, int16(t.Unix()/secInDay))
}
dt.values, nulls = append(dt.values, in...), make([]uint8, len(v))
case []*types.Date:
nulls = make([]uint8, len(v))
for i, v := range v {
switch {
case v != nil:
if err := dateOverflow(minDate, maxDate, (*v).Time, "2006-01-02"); err != nil {
return nil, err
}
dt.values = append(dt.values, int16(v.Unix()/secInDay))
default:
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
default:
return nil, &ColumnConverterError{
Op: "Append",
@@ -121,6 +148,18 @@ func (dt *Date) AppendRow(v interface{}) error {
}
date = int16(v.Unix() / secInDay)
}
case types.Date:
if err := dateOverflow(minDate, maxDate, v.Time, "2006-01-02"); err != nil {
return err
}
date = int16(v.Unix() / secInDay)
case *types.Date:
if v != nil {
if err := dateOverflow(minDate, maxDate, (*v).Time, "2006-01-02"); err != nil {
return err
}
date = int16(v.Unix() / secInDay)
}
case nil:
default:
return &ColumnConverterError{
39 changes: 39 additions & 0 deletions lib/column/date32.go
@@ -62,6 +62,11 @@ func (dt *Date32) ScanRow(dest interface{}, row int) error {
case **time.Time:
*d = new(time.Time)
**d = dt.row(row).Time
case *types.Date:
*d = dt.row(row)
case **types.Date:
*d = new(types.Date)
**d = dt.row(row)
default:
return &ColumnConverterError{
Op: "ScanRow",
@@ -96,6 +101,28 @@ func (dt *Date32) Append(v interface{}) (nulls []uint8, err error) {
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
case []types.Date:
in := make([]int32, 0, len(v))
for _, t := range v {
if err := dateOverflow(minDate32, maxDate32, t.Time, "2006-01-02"); err != nil {
return nil, err
}
in = append(in, timeToInt32(t.Time))
}
dt.values, nulls = append(dt.values, in...), make([]uint8, len(v))
case []*types.Date:
nulls = make([]uint8, len(v))
for i, v := range v {
switch {
case v != nil:
if err := dateOverflow(minDate32, maxDate32, (*v).Time, "2006-01-02"); err != nil {
return nil, err
}
dt.values = append(dt.values, timeToInt32((*v).Time))
default:
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
default:
return nil, &ColumnConverterError{
Op: "Append",
@@ -121,6 +148,18 @@ func (dt *Date32) AppendRow(v interface{}) error {
}
date = timeToInt32(*v)
}
case types.Date:
if err := dateOverflow(minDate32, maxDate32, v.Time, "2006-01-02"); err != nil {
return err
}
date = timeToInt32(v.Time)
case *types.Date:
if v != nil {
if err := dateOverflow(minDate32, maxDate32, (*v).Time, "2006-01-02"); err != nil {
return err
}
date = timeToInt32((*v).Time)
}
case nil:
default:
return &ColumnConverterError{
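Taken together, the date.go and date32.go changes let date and date32 columns accept and return types.Date values directly (ScanRow into *types.Date / **types.Date, Append/AppendRow from types.Date, *types.Date, and their slices). A minimal usage sketch follows; the stream name and column layout are made up for illustration, and types.Date is assumed to embed a time.Time (as the t.Time accesses in the diff suggest), so this is not code from the PR itself:

package main

import (
	"context"
	"time"

	proton "github.com/timeplus-io/proton-go-driver/v2"
	"github.com/timeplus-io/proton-go-driver/v2/types"
)

func main() {
	ctx := context.Background()
	conn, err := proton.Open(&proton.Options{
		Addr: []string{"127.0.0.1:8463"},
		Auth: proton.Auth{Database: "default", Username: "default"},
	})
	if err != nil {
		panic(err)
	}
	// Hypothetical stream with a single date column.
	if err := conn.Exec(ctx, "CREATE STREAM IF NOT EXISTS example_date (d date)"); err != nil {
		panic(err)
	}
	batch, err := conn.PrepareBatch(ctx, "INSERT INTO example_date (* except _tp_time)")
	if err != nil {
		panic(err)
	}
	// Append/AppendRow now take types.Date (and *types.Date, []types.Date, []*types.Date).
	d := types.Date{Time: time.Date(2023, 10, 8, 0, 0, 0, 0, time.UTC)}
	if err := batch.Append(d); err != nil {
		panic(err)
	}
	if err := batch.Send(); err != nil {
		panic(err)
	}
	// ScanRow now also fills *types.Date destinations.
	var out types.Date
	query := "SELECT d FROM example_date WHERE _tp_time > earliest_ts() LIMIT 1"
	if err := conn.QueryRow(ctx, query).Scan(&out); err != nil {
		panic(err)
	}
}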
40 changes: 40 additions & 0 deletions lib/column/datetime.go
@@ -19,6 +19,7 @@ package column

import (
"fmt"
"github.com/timeplus-io/proton-go-driver/v2/types"
"reflect"
"strings"
"time"
@@ -76,6 +77,11 @@ func (dt *DateTime) ScanRow(dest interface{}, row int) error {
case **time.Time:
*d = new(time.Time)
**d = dt.row(row)
case *types.Datetime:
*d = types.Datetime{dt.row(row)}
case **types.Datetime:
*d = new(types.Datetime)
**d = types.Datetime{dt.row(row)}
default:
return &ColumnConverterError{
Op: "ScanRow",
@@ -110,6 +116,28 @@ func (dt *DateTime) Append(v interface{}) (nulls []uint8, err error) {
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
case []types.Datetime:
in := make([]uint32, 0, len(v))
for _, t := range v {
if err := dateOverflow(minDateTime, maxDateTime, t.Time, "2006-01-02 15:04:05"); err != nil {
return nil, err
}
in = append(in, uint32(t.Unix()))
}
dt.values, nulls = append(dt.values, in...), make([]uint8, len(v))
case []*types.Datetime:
nulls = make([]uint8, len(v))
for i, v := range v {
switch {
case v != nil:
if err := dateOverflow(minDateTime, maxDateTime, (*v).Time, "2006-01-02 15:04:05"); err != nil {
return nil, err
}
dt.values = append(dt.values, uint32(v.Unix()))
default:
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
default:
return nil, &ColumnConverterError{
Op: "Append",
@@ -135,6 +163,18 @@ func (dt *DateTime) AppendRow(v interface{}) error {
}
datetime = uint32(v.Unix())
}
case types.Datetime:
if err := dateOverflow(minDateTime, maxDateTime, v.Time, "2006-01-02 15:04:05"); err != nil {
return err
}
datetime = uint32(v.Unix())
case *types.Datetime:
if v != nil {
if err := dateOverflow(minDateTime, maxDateTime, (*v).Time, "2006-01-02 15:04:05"); err != nil {
return err
}
datetime = uint32(v.Unix())
}
case nil:
default:
return &ColumnConverterError{
40 changes: 40 additions & 0 deletions lib/column/datetime64.go
@@ -19,6 +19,7 @@ package column

import (
"fmt"
"github.com/timeplus-io/proton-go-driver/v2/types"
"math"
"reflect"
"strconv"
@@ -90,6 +91,11 @@ func (dt *DateTime64) ScanRow(dest interface{}, row int) error {
case **time.Time:
*d = new(time.Time)
**d = dt.row(row)
case *types.Datetime:
*d = types.Datetime{dt.row(row)}
case **types.Datetime:
*d = new(types.Datetime)
**d = types.Datetime{dt.row(row)}
default:
return &ColumnConverterError{
Op: "ScanRow",
@@ -126,6 +132,28 @@ func (dt *DateTime64) Append(v interface{}) (nulls []uint8, err error) {
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
case []types.Datetime:
in := make([]int64, 0, len(v))
for _, t := range v {
if err := dateOverflow(minDateTime64, maxDateTime64, t.Time, "2006-01-02 15:04:05"); err != nil {
return nil, err
}
in = append(in, dt.timeToInt64(t.Time))
}
dt.values, nulls = append(dt.values, in...), make([]uint8, len(v))
case []*types.Datetime:
nulls = make([]uint8, len(v))
for i, v := range v {
switch {
case v != nil:
if err := dateOverflow(minDateTime64, maxDateTime64, (*v).Time, "2006-01-02 15:04:05"); err != nil {
return nil, err
}
dt.values = append(dt.values, dt.timeToInt64((*v).Time))
default:
dt.values, nulls[i] = append(dt.values, 0), 1
}
}
default:
return nil, &ColumnConverterError{
Op: "Append",
@@ -153,6 +181,18 @@ func (dt *DateTime64) AppendRow(v interface{}) error {
}
datetime = dt.timeToInt64(*v)
}
case types.Datetime:
if err := dateOverflow(minDateTime64, maxDateTime64, v.Time, "2006-01-02 15:04:05"); err != nil {
return err
}
datetime = dt.timeToInt64(v.Time)
case *types.Datetime:
if v != nil {
if err := dateOverflow(minDateTime64, maxDateTime64, (*v).Time, "2006-01-02 15:04:05"); err != nil {
return err
}
datetime = dt.timeToInt64((*v).Time)
}
case nil:
default:
return &ColumnConverterError{
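The datetime.go and datetime64.go hunks mirror this support for types.Datetime, including the columnar []types.Datetime / []*types.Datetime paths in Append. A short sketch under the same caveats (hypothetical stream; types.Datetime treated as a single embedded time.Time field, as the unkeyed types.Datetime{dt.row(row)} literal implies):

package main

import (
	"context"
	"time"

	proton "github.com/timeplus-io/proton-go-driver/v2"
	"github.com/timeplus-io/proton-go-driver/v2/types"
)

func main() {
	ctx := context.Background()
	conn, err := proton.Open(&proton.Options{
		Addr: []string{"127.0.0.1:8463"},
		Auth: proton.Auth{Database: "default", Username: "default"},
	})
	if err != nil {
		panic(err)
	}
	// Hypothetical stream with a single datetime column.
	if err := conn.Exec(ctx, "CREATE STREAM IF NOT EXISTS example_dt (ts datetime)"); err != nil {
		panic(err)
	}
	batch, err := conn.PrepareBatch(ctx, "INSERT INTO example_dt (* except _tp_time)")
	if err != nil {
		panic(err)
	}
	// Columnar append goes through the new []types.Datetime case in Append.
	values := []types.Datetime{
		{time.Now().Truncate(time.Second)},
		{time.Now().Add(-time.Hour).Truncate(time.Second)},
	}
	if err := batch.Column(0).Append(values); err != nil {
		panic(err)
	}
	if err := batch.Send(); err != nil {
		panic(err)
	}
	// Row scans can target *types.Datetime (or **types.Datetime).
	var out types.Datetime
	query := "SELECT ts FROM example_dt WHERE _tp_time > earliest_ts() LIMIT 1"
	if err := conn.QueryRow(ctx, query).Scan(&out); err != nil {
		panic(err)
	}
}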
10 changes: 5 additions & 5 deletions tests/abort_test.go
@@ -31,7 +31,7 @@ func TestAbort(t *testing.T) {
var (
ctx = context.Background()
conn, err = proton.Open(&proton.Options{
Addr: []string{"127.0.0.1:7587"},
Addr: []string{"127.0.0.1:8463"},
Auth: proton.Auth{
Database: "default",
Username: "default",
@@ -47,23 +47,23 @@
const ddl = `
CREATE STREAM test_abort (
Col1 uint8
) Engine Memory
)
`
defer func() {
conn.Exec(ctx, "DROP STREAM test_abort")
}()
if err := conn.Exec(ctx, ddl); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_abort"); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_abort (* except _tp_time)"); assert.NoError(t, err) {
if assert.NoError(t, batch.Abort()) {
if err := batch.Abort(); assert.Error(t, err) {
assert.Equal(t, proton.ErrBatchAlreadySent, err)
}
}
}
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_abort"); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_abort (* except _tp_time)"); assert.NoError(t, err) {
if assert.NoError(t, batch.Append(uint8(1))) && assert.NoError(t, batch.Send()) {
var col1 uint8
if err := conn.QueryRow(ctx, "SELECT * FROM test_abort SETTINGS query_mode='table'").Scan(&col1); assert.NoError(t, err) {
if err := conn.QueryRow(ctx, "SELECT (* except _tp_time) FROM test_abort WHERE _tp_time > earliest_ts() LIMIT 1").Scan(&col1); assert.NoError(t, err) {
assert.Equal(t, uint8(1), col1)
}
}
16 changes: 8 additions & 8 deletions tests/array_test.go
@@ -30,7 +30,7 @@ func TestArray(t *testing.T) {
var (
ctx = context.Background()
conn, err = proton.Open(&proton.Options{
Addr: []string{"127.0.0.1:7587"},
Addr: []string{"127.0.0.1:8463"},
Auth: proton.Auth{
Database: "default",
Username: "default",
@@ -48,13 +48,13 @@
Col1 array(string)
, Col2 array(array(uint32))
, Col3 array(array(array(datetime)))
) Engine Memory
)
`
defer func() {
conn.Exec(ctx, "DROP STREAM test_array")
}()
if err := conn.Exec(ctx, ddl); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_array"); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_array (* except _tp_time)"); assert.NoError(t, err) {
var (
timestamp = time.Now().Truncate(time.Second)
col1Data = []string{"A", "b", "c"}
@@ -91,7 +91,7 @@
}
}
if assert.NoError(t, batch.Send()) {
if rows, err := conn.Query(ctx, "SELECT * FROM test_array"); assert.NoError(t, err) {
if rows, err := conn.Query(ctx, "SELECT (* except _tp_time) FROM test_array WHERE _tp_time > earliest_ts() LIMIT 1"); assert.NoError(t, err) {
for rows.Next() {
var (
col1 []string
@@ -118,7 +118,7 @@ func TestColumnarArray(t *testing.T) {
var (
ctx = context.Background()
conn, err = proton.Open(&proton.Options{
Addr: []string{"127.0.0.1:7587"},
Addr: []string{"127.0.0.1:8463"},
Auth: proton.Auth{
Database: "default",
Username: "default",
@@ -136,7 +136,7 @@
Col1 array(string)
, Col2 array(array(uint32))
, Col3 array(array(array(datetime)))
) Engine Memory
)
`
defer func() {
conn.Exec(ctx, "DROP STREAM test_array")
@@ -183,7 +183,7 @@
col3DataColArr = append(col3DataColArr, col3Data)
}

if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_array"); assert.NoError(t, err) {
if batch, err := conn.PrepareBatch(ctx, "INSERT INTO test_array (* except _tp_time)"); assert.NoError(t, err) {
if err := batch.Column(0).Append(col1DataColArr); !assert.NoError(t, err) {
return
}
@@ -194,7 +194,7 @@
return
}
if assert.NoError(t, batch.Send()) {
if rows, err := conn.Query(ctx, "SELECT * FROM test_array"); assert.NoError(t, err) {
if rows, err := conn.Query(ctx, "SELECT (* except _tp_time) FROM test_array WHERE _tp_time > earliest_ts() LIMIT 1"); assert.NoError(t, err) {
for rows.Next() {
var (
col1 []string