Skip to content
Merged
2 changes: 1 addition & 1 deletion src/snowflake/snowpark/_internal/type_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ def convert_sf_to_sp_type(
if column_type_name == "REAL":
return DoubleType()
if (column_type_name == "FIXED" or column_type_name == "NUMBER") and scale == 0:
return LongType()
return LongType(precision=precision, scale=scale)
raise NotImplementedError(
"Unsupported type: {}, precision: {}, scale: {}".format(
column_type_name, precision, scale
Expand Down
4 changes: 3 additions & 1 deletion src/snowflake/snowpark/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,9 @@ def _fill_ast(self, ast: proto.DataType) -> None:


class _NumericType(_AtomicType):
    """Base class for Snowpark numeric data types.

    Internal-only: optionally records the originating Snowflake column's
    ``precision`` and ``scale`` (passed as keyword arguments) so that this
    metadata survives server-to-client type conversion. Both attributes
    default to ``None`` when the keywords are not supplied; any other
    keyword arguments are ignored.
    """

    def __init__(self, **kwargs) -> None:
        # Pull the optional metadata out of kwargs; absent keys yield None.
        self._precision = kwargs.get("precision")
        self._scale = kwargs.get("scale")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
def __init__(self, **kwargs) -> None:
self._precision = kwargs.get("precision", None)
self._scale = kwargs.get("scale", None)
def __init__(self, precision: int | None = None, scale: int | None = None) -> None:
self._precision = precision
self._scale = scale

Can we do this style instead to make it more explicit?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is an internal-only feature; I think we don't want to expose this information to customers?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK, we can just keep it as kwargs then.

What do you think about moving the arguments onto _IntegralType instead of _NumericType? It looks like Snowflake only has a single precision for FLOAT and DECIMAL (docs), so we should never have to expose precision information for those types.

Copy link
Contributor

@sfc-gh-aling sfc-gh-aling Dec 5, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

+1 on using _IntegralType; I don't think we need _scale, as it would always be 0.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see — I will fix this.



class TimestampTimeZone(Enum):
Expand Down
13 changes: 13 additions & 0 deletions tests/integ/test_datatypes.py
Original file line number Diff line number Diff line change
Expand Up @@ -408,3 +408,16 @@ def test_join_basic(session):
]
)
)


def test_numeric_type_store_precision_and_scale(session):
    """A NUMBER(38, 0) column round-trips through a table as LongType while
    keeping the original precision/scale on the private attributes."""
    table_name = Utils.random_table_name()
    # Largest 38-digit value: exercises the maximum Snowflake NUMBER precision.
    max_38_digit_value = Decimal("9" * 38)
    df = session.create_dataframe(
        [max_38_digit_value],
        StructType([StructField("large_value", DecimalType(38, 0), True)]),
    )
    df.write.save_as_table(table_name, mode="overwrite", table_type="temp")
    fetched = session.sql(f"select * from {table_name}")
    column_type = fetched.schema.fields[0].datatype
    assert isinstance(column_type, LongType)
    assert column_type._precision == 38
    assert column_type._scale == 0
Loading