Commit 6d6ef5b

Fix type inference for int64/uint64 (BIGINT) and float64 (DOUBLE) (#316)
## Summary

Fixes type inference bugs for numeric parameters:

- **int64/uint64**: Were incorrectly mapped to `SqlInteger` instead of `SqlBigInt` (fixes #250)
- **float64**: Was incorrectly mapped to `SqlFloat` instead of `SqlDouble` (fixes #314)

## Problems Fixed

### 1. int64/uint64 → BIGINT

When inserting int64/uint64 values into BIGINT columns, the driver was sending them with type `INTEGER` instead of `BIGINT`, causing the server to reject large values with an error:

```
[INVALID_PARAMETER_MARKER_VALUE.INVALID_VALUE_FOR_DATA_TYPE] An invalid parameter mapping was provided: the value '1311768467463790320' for parameter 'null' cannot be cast to INT because it is malformed.
```

Additionally, int64 was formatted with `strconv.Itoa(int(value))`, which truncates values larger than int32 on platforms where `int` is 32 bits.

### 2. float64 → DOUBLE

When inserting float64 values into DOUBLE columns, the driver was sending them with type `FLOAT` (32-bit) instead of `DOUBLE` (64-bit), causing:

- Precision loss for high-precision float64 values
- Potential overflow for values beyond the float32 range (~3.4e38)

### 3. Panic with explicit Parameter type

When using `Parameter{Type: SqlBigInt, Value: int64(...)}` with a non-string value, the driver panicked in `convertNamedValuesToSparkParams` due to an unsafe type assertion.

## Changes

- `parameters.go`:
  - int64 now uses `strconv.FormatInt()` and maps to `SqlBigInt`
  - uint64 now maps to `SqlBigInt`
  - float64 now maps to `SqlDouble` instead of `SqlFloat`
  - Added a safe type assertion with a fallback in `convertNamedValuesToSparkParams`

## Test plan

- [x] Added unit tests for int64/uint64 type inference (`TestParameter_BigInt`)
- [x] Added unit tests for float64/float32 type inference (`TestParameter_Float`)
- [x] Verified that large int64 values are correctly inserted into and retrieved from BIGINT columns
- [x] Verified that float64 values with high precision are correctly inserted and retrieved
- [x] All existing parameter tests pass

🤖 Generated with [Claude Code](https://claude.com/claude-code)

---------

Co-authored-by: Claude Opus 4.5 <[email protected]>
1 parent a0d5e75 commit 6d6ef5b
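
For context, a minimal usage sketch of the case this commit addresses. It assumes the repository is the Databricks SQL Go driver (imported below as `dbsql`, registered under the driver name `databricks`) and a warehouse that supports named query parameters; the `DATABRICKS_DSN` environment variable, `hypothetical_table`, and its columns are placeholders, not part of the change:

```go
package main

import (
	"context"
	"database/sql"
	"log"
	"os"

	dbsql "github.com/databricks/databricks-sql-go"
)

func main() {
	// Placeholder DSN; table and column names below are illustrative only.
	db, err := sql.Open("databricks", os.Getenv("DATABRICKS_DSN"))
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Before this fix, the int64 argument was inferred as INTEGER (and formatted
	// via strconv.Itoa, truncating where int is 32 bits) and the float64 as
	// FLOAT; with the fix they are sent as BIGINT and DOUBLE respectively.
	_, err = db.ExecContext(context.Background(),
		"INSERT INTO hypothetical_table (id, score) VALUES (:id, :score)",
		dbsql.Parameter{Name: "id", Value: int64(1311768467463790320)},
		dbsql.Parameter{Name: "score", Value: float64(3.141592653589793)},
	)
	if err != nil {
		log.Fatal(err)
	}
}
```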

2 files changed: +157, -7 lines


parameter_test.go

Lines changed: 136 additions & 0 deletions
@@ -37,6 +37,142 @@ func TestParameter_Inference(t *testing.T) {
 	})
 }
 
+func TestParameter_BigInt(t *testing.T) {
+	t.Run("Should infer int64 as BIGINT", func(t *testing.T) {
+		maxInt64 := int64(9223372036854775807)
+		values := []driver.NamedValue{
+			{Value: maxInt64},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "BIGINT", *parameters[0].Type)
+		require.Equal(t, "9223372036854775807", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should infer uint64 as BIGINT", func(t *testing.T) {
+		largeUint64 := uint64(0x123456789ABCDEF0)
+		values := []driver.NamedValue{
+			{Value: largeUint64},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "BIGINT", *parameters[0].Type)
+		require.Equal(t, "1311768467463790320", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should infer negative int64 as BIGINT", func(t *testing.T) {
+		minInt64 := int64(-9223372036854775808)
+		values := []driver.NamedValue{
+			{Value: minInt64},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "BIGINT", *parameters[0].Type)
+		require.Equal(t, "-9223372036854775808", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should handle explicit BigInt Parameter with non-string value", func(t *testing.T) {
+		values := []driver.NamedValue{
+			{Value: Parameter{Type: SqlBigInt, Value: int64(12345)}},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "BIGINT", *parameters[0].Type)
+		require.Equal(t, "12345", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should preserve int32 as INTEGER", func(t *testing.T) {
+		values := []driver.NamedValue{
+			{Value: int32(2147483647)},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "INTEGER", *parameters[0].Type)
+		require.Equal(t, "2147483647", *parameters[0].Value.StringValue)
+	})
+}
+
+func TestParameter_Float(t *testing.T) {
+	t.Run("Should infer float64 as DOUBLE", func(t *testing.T) {
+		value := float64(3.141592653589793)
+		values := []driver.NamedValue{
+			{Value: value},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "DOUBLE", *parameters[0].Type)
+		require.Equal(t, "3.141592653589793", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should infer float32 as FLOAT", func(t *testing.T) {
+		value := float32(3.14)
+		values := []driver.NamedValue{
+			{Value: value},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "FLOAT", *parameters[0].Type)
+		require.Equal(t, "3.14", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should handle large float64 values", func(t *testing.T) {
+		// Value beyond float32 range
+		value := float64(1e200)
+		values := []driver.NamedValue{
+			{Value: value},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "DOUBLE", *parameters[0].Type)
+	})
+
+	t.Run("Should handle small float64 values", func(t *testing.T) {
+		// Value below float32 precision
+		value := float64(1e-300)
+		values := []driver.NamedValue{
+			{Value: value},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "DOUBLE", *parameters[0].Type)
+	})
+
+	t.Run("Should handle explicit Double Parameter with non-string value", func(t *testing.T) {
+		values := []driver.NamedValue{
+			{Value: Parameter{Type: SqlDouble, Value: float64(3.14159)}},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "DOUBLE", *parameters[0].Type)
+		require.Equal(t, "3.14159", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should format large float64 consistently when using explicit type", func(t *testing.T) {
+		// This tests that explicit Parameter with large float64 uses decimal notation
+		// (strconv.FormatFloat) instead of scientific notation (fmt.Sprintf)
+		value := float64(1e20)
+		values := []driver.NamedValue{
+			{Value: Parameter{Type: SqlDouble, Value: value}},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "DOUBLE", *parameters[0].Type)
+		// Should be decimal notation, not "1e+20"
+		require.Equal(t, "100000000000000000000", *parameters[0].Value.StringValue)
+	})
+
+	t.Run("Should format float32 consistently when using explicit type", func(t *testing.T) {
+		value := float32(3.14159)
+		values := []driver.NamedValue{
+			{Value: Parameter{Type: SqlFloat, Value: value}},
+		}
+		parameters, err := convertNamedValuesToSparkParams(values)
+		require.NoError(t, err)
+		require.Equal(t, "FLOAT", *parameters[0].Type)
+		require.Equal(t, "3.14159", *parameters[0].Value.StringValue)
+	})
+}
+
 func TestParameters_ConvertToSpark(t *testing.T) {
 	t.Run("Should convert names parameters", func(t *testing.T) {
 		values := [2]driver.NamedValue{

parameters.go

Lines changed: 21 additions & 7 deletions
@@ -140,17 +140,17 @@ func inferType(param *Parameter) {
 		param.Value = strconv.FormatUint(uint64(value), 10)
 		param.Type = SqlInteger
 	case int64:
-		param.Value = strconv.Itoa(int(value))
-		param.Type = SqlInteger
+		param.Value = strconv.FormatInt(value, 10)
+		param.Type = SqlBigInt
 	case uint64:
-		param.Value = strconv.FormatUint(uint64(value), 10)
-		param.Type = SqlInteger
+		param.Value = strconv.FormatUint(value, 10)
+		param.Type = SqlBigInt
 	case float32:
 		param.Value = strconv.FormatFloat(float64(value), 'f', -1, 32)
 		param.Type = SqlFloat
 	case float64:
-		param.Value = strconv.FormatFloat(float64(value), 'f', -1, 64)
-		param.Type = SqlFloat
+		param.Value = strconv.FormatFloat(value, 'f', -1, 64)
+		param.Type = SqlDouble
 	case time.Time:
 		param.Value = value.Format(time.RFC3339Nano)
 		param.Type = SqlTimestamp
@@ -179,7 +179,21 @@ func convertNamedValuesToSparkParams(values []driver.NamedValue) ([]*cli_service
 		if sqlParam.Type == SqlVoid {
 			sparkValue = nil
 		} else {
-			stringValue := sqlParam.Value.(string)
+			var stringValue string
+			switch v := sqlParam.Value.(type) {
+			case string:
+				stringValue = v
+			case float32:
+				stringValue = strconv.FormatFloat(float64(v), 'f', -1, 32)
+			case float64:
+				stringValue = strconv.FormatFloat(v, 'f', -1, 64)
+			case int64:
+				stringValue = strconv.FormatInt(v, 10)
+			case uint64:
+				stringValue = strconv.FormatUint(v, 10)
+			default:
+				stringValue = fmt.Sprintf("%v", sqlParam.Value)
+			}
 			sparkValue = &cli_service.TSparkParameterValue{StringValue: &stringValue}
 		}
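
The type switch in the second hunk is what removes the panic described in the summary. Below is a sketch of the call pattern that previously crashed, under the same assumptions and imports as the earlier example; `updateID`, `hypothetical_table`, and the column names are illustrative only:

```go
// Passing an explicit Parameter whose Value is not a string used to hit the
// unconditional sqlParam.Value.(string) assertion and panic; with the new
// switch the int64 is formatted via strconv.FormatInt and sent as BIGINT.
func updateID(ctx context.Context, db *sql.DB) error {
	_, err := db.ExecContext(ctx,
		"UPDATE hypothetical_table SET id = :id WHERE name = :name",
		dbsql.Parameter{Name: "id", Type: dbsql.SqlBigInt, Value: int64(12345)},
		dbsql.Parameter{Name: "name", Value: "example"},
	)
	return err
}
```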
