diff --git a/.github/workflows/parameters/parameters_AWS.json.gpg b/.github/workflows/parameters/parameters_AWS.json.gpg index e897ec5fe..778cca91d 100644 Binary files a/.github/workflows/parameters/parameters_AWS.json.gpg and b/.github/workflows/parameters/parameters_AWS.json.gpg differ diff --git a/.github/workflows/parameters/parameters_AZURE.json.gpg b/.github/workflows/parameters/parameters_AZURE.json.gpg index 7e4dbaf55..687ec4329 100644 Binary files a/.github/workflows/parameters/parameters_AZURE.json.gpg and b/.github/workflows/parameters/parameters_AZURE.json.gpg differ diff --git a/.github/workflows/parameters/parameters_GCP.json.gpg b/.github/workflows/parameters/parameters_GCP.json.gpg index ea00564b7..5dc80fd7d 100644 Binary files a/.github/workflows/parameters/parameters_GCP.json.gpg and b/.github/workflows/parameters/parameters_GCP.json.gpg differ diff --git a/README.md b/README.md index ed8b8341c..0378b5416 100644 --- a/README.md +++ b/README.md @@ -104,6 +104,14 @@ Snowflake data types and their .NET types is covered in: [Data Types and Data Fo How to execute a query, use query bindings, run queries synchronously and asynchronously: [Running Queries and Reading Results](doc/QueryingData.md) +## Structured types + +Using structured types: [Structured types](doc/StructuredTypes.md) + +## Vector type + +Using vector type: [Vector type](doc/VectorType.md) + ## Stage Files Using stage files within PUT/GET commands: diff --git a/Snowflake.Data.Tests/IntegrationTests/SFBindTestIT.cs b/Snowflake.Data.Tests/IntegrationTests/SFBindTestIT.cs index 08da1cecf..05995e0d4 100755 --- a/Snowflake.Data.Tests/IntegrationTests/SFBindTestIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/SFBindTestIT.cs @@ -1,6 +1,7 @@ /* * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. 
*/ +#nullable enable using System; using System.Data; @@ -87,7 +88,7 @@ public void TestBindNullValue() foreach (DbType type in Enum.GetValues(typeof(DbType))) { bool isTypeSupported = true; - string colName = null; + string colName; using (IDbCommand command = dbConnection.CreateCommand()) { var param = command.CreateParameter(); @@ -226,7 +227,7 @@ public void TestBindValue() foreach (DbType type in Enum.GetValues(typeof(DbType))) { bool isTypeSupported = true; - string colName = null; + string colName; using (IDbCommand command = dbConnection.CreateCommand()) { var param = command.CreateParameter(); @@ -361,7 +362,7 @@ public void TestBindValueWithSFDataType() dbConnection.Open(); foreach (SFDataType type in Enum.GetValues(typeof(SFDataType))) { - if (!type.Equals(SFDataType.None) && !type.Equals(SFDataType.MAP)) + if (!type.Equals(SFDataType.None) && !type.Equals(SFDataType.MAP) && !type.Equals(SFDataType.VECTOR)) { bool isTypeSupported = true; string[] columns; @@ -649,6 +650,7 @@ public void TestPutArrayBind() var count = cmd.ExecuteNonQuery(); Assert.AreEqual(total * 3, count); + cmd.Parameters.Clear(); cmd.CommandText = $"SELECT * FROM {TableName}"; IDataReader reader = cmd.ExecuteReader(); Assert.IsTrue(reader.Read()); @@ -884,13 +886,20 @@ public void TestExplicitDbTypeAssignmentForArrayValue() [TestCase(ResultFormat.ARROW, SFTableType.Iceberg, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, null)] */ // Session TimeZone cases - [TestCase(ResultFormat.ARROW, SFTableType.Standard, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, "Europe/Warsaw")] + [TestCase(ResultFormat.JSON, SFTableType.Standard, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, "Europe/Warsaw")] [TestCase(ResultFormat.JSON, SFTableType.Standard, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, "Asia/Tokyo")] + [TestCase(ResultFormat.ARROW, SFTableType.Standard, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, "Europe/Warsaw")] + [TestCase(ResultFormat.ARROW, SFTableType.Standard, SFDataType.TIMESTAMP_LTZ, 6, DbType.DateTimeOffset, FormatYmdHmsZ, "Asia/Tokyo")] public void TestDateTimeBinding(ResultFormat resultFormat, SFTableType tableType, SFDataType columnType, Int32? 
columnPrecision, DbType bindingType, string comparisonFormat, string timeZone) { // Arrange - var timestamp = "2023/03/15 13:17:29.207 +05:00"; // 08:17:29.207 UTC - var expected = ExpectedTimestampWrapper.From(timestamp, columnType); + string[] timestamps = + { + "2023/03/15 13:17:29.207 +05:00", + "9999/12/30 23:24:25.987 +07:00", + "0001/01/02 02:06:07.000 -04:00" + }; + var expected = ExpectedTimestampWrapper.From(timestamps, columnType); var columnWithPrecision = ColumnTypeWithPrecision(columnType, columnPrecision); var testCase = $"ResultFormat={resultFormat}, TableType={tableType}, ColumnType={columnWithPrecision}, BindingType={bindingType}, ComparisonFormat={comparisonFormat}"; var bindingThreshold = 65280; // when exceeded enforces bindings via file on stage @@ -906,24 +915,34 @@ public void TestDateTimeBinding(ResultFormat resultFormat, SFTableType tableType if (!timeZone.IsNullOrEmpty()) // Driver ignores this setting and relies on local environment timezone conn.ExecuteNonQuery($"alter session set TIMEZONE = '{timeZone}'"); + // prepare initial column + var columns = new List<string> { "id number(10,0) not null primary key" }; + var sql_columns = "id"; + var sql_values = "?"; + + // prepare additional columns + for (int i = 1; i <= timestamps.Length; ++i) + { + columns.Add($"ts_{i} {columnWithPrecision}"); + sql_columns += $",ts_{i}"; + sql_values += ",?"; + } + CreateOrReplaceTable(conn, TableName, tableType.TableDDLCreationPrefix(), - new[] { - "id number(10,0) not null primary key", // necessary only for HYBRID tables - $"ts {columnWithPrecision}" - }, + columns, tableType.TableDDLCreationFlags()); // Act+Assert - var sqlInsert = $"insert into {TableName} (id, ts) values (?, ?)"; + var sqlInsert = $"insert into {TableName} ({sql_columns}) values ({sql_values})"; InsertSingleRecord(conn, sqlInsert, bindingType, 1, expected); InsertMultipleRecords(conn, sqlInsert, bindingType, 2, expected, smallBatchRowCount, false); InsertMultipleRecords(conn, sqlInsert, bindingType, smallBatchRowCount+2, expected, bigBatchRowCount, true); // Assert var row = 0; - using (var select = conn.CreateCommand($"select id, ts from {TableName} order by id")) + using (var select = conn.CreateCommand($"select {sql_columns} from {TableName} order by id")) { s_logger.Debug(select.CommandText); var reader = select.ExecuteReader(); @@ -932,7 +951,11 @@ public void TestDateTimeBinding(ResultFormat resultFormat, SFTableType tableType ++row; string faultMessage = $"Mismatch for row: {row}, {testCase}"; Assert.AreEqual(row, reader.GetInt32(0)); - expected.AssertEqual(reader.GetValue(1), comparisonFormat, faultMessage); + + for (int i = 0; i < timestamps.Length; ++i) + { + expected.AssertEqual(reader.GetValue(i + 1), comparisonFormat, faultMessage, i); + } } } Assert.AreEqual(1+smallBatchRowCount+bigBatchRowCount, row); @@ -947,12 +970,24 @@ private void InsertSingleRecord(IDbConnection conn, string sqlInsert, DbType bin insert.Add("1", DbType.Int32, identifier); if (ExpectedTimestampWrapper.IsOffsetType(ts.ExpectedColumnType())) { - var parameter = (SnowflakeDbParameter)insert.Add("2", binding, ts.GetDateTimeOffset()); - parameter.SFDataType = ts.ExpectedColumnType(); + var dateTimeOffsets = ts.GetDateTimeOffsets(); + for (int i = 0; i < dateTimeOffsets.Length; ++i) + { + var parameterName = (i + 2).ToString(); + var parameterValue = dateTimeOffsets[i]; + var parameter = insert.Add(parameterName, binding, parameterValue); + parameter.SFDataType = ts.ExpectedColumnType(); + } } else { - insert.Add("2", binding, 
ts.GetDateTime()); + var dateTimes = ts.GetDateTimes(); + for (int i = 0; i < dateTimes.Length; ++i) + { + var parameterName = (i + 2).ToString(); + var parameterValue = dateTimes[i]; + insert.Add(parameterName, binding, parameterValue); + } } // Act @@ -973,12 +1008,25 @@ private void InsertMultipleRecords(IDbConnection conn, string sqlInsert, DbType insert.Add("1", DbType.Int32, Enumerable.Range(initialIdentifier, rowsCount).ToArray()); if (ExpectedTimestampWrapper.IsOffsetType(ts.ExpectedColumnType())) { - var parameter = (SnowflakeDbParameter)insert.Add("2", binding, Enumerable.Repeat(ts.GetDateTimeOffset(), rowsCount).ToArray()); - parameter.SFDataType = ts.ExpectedColumnType(); + var dateTimeOffsets = ts.GetDateTimeOffsets(); + for (int i = 0; i < dateTimeOffsets.Length; ++i) + { + var parameterName = (i + 2).ToString(); + var parameterValue = Enumerable.Repeat(dateTimeOffsets[i], rowsCount).ToArray(); + var parameter = insert.Add(parameterName, binding, parameterValue); + parameter.SFDataType = ts.ExpectedColumnType(); + } + } else { - insert.Add("2", binding, Enumerable.Repeat(ts.GetDateTime(), rowsCount).ToArray()); + var dateTimes = ts.GetDateTimes(); + for (int i = 0; i < dateTimes.Length; ++i) + { + var parameterName = (i + 2).ToString(); + var parameterValue = Enumerable.Repeat(dateTimes[i], rowsCount).ToArray(); + insert.Add(parameterName, binding, parameterValue); + } } // Act @@ -1001,57 +1049,66 @@ private static string ColumnTypeWithPrecision(SFDataType columnType, Int32? colu class ExpectedTimestampWrapper { private readonly SFDataType _columnType; - private readonly DateTime? _expectedDateTime; - private readonly DateTimeOffset? _expectedDateTimeOffset; + private readonly DateTime[]? _expectedDateTimes; + private readonly DateTimeOffset[]? 
_expectedDateTimeOffsets; - internal static ExpectedTimestampWrapper From(string timestampWithTimeZone, SFDataType columnType) + internal static ExpectedTimestampWrapper From(string[] timestampsWithTimeZone, SFDataType columnType) { if (IsOffsetType(columnType)) { - var dateTimeOffset = DateTimeOffset.ParseExact(timestampWithTimeZone, "yyyy/MM/dd HH:mm:ss.fff zzz", CultureInfo.InvariantCulture); - return new ExpectedTimestampWrapper(dateTimeOffset, columnType); + var dateTimeOffsets = + timestampsWithTimeZone + .Select(ts => DateTimeOffset.ParseExact(ts, "yyyy/MM/dd HH:mm:ss.fff zzz", CultureInfo.InvariantCulture)) + .ToArray(); + return new ExpectedTimestampWrapper(dateTimeOffsets, columnType); } - var dateTime = DateTime.ParseExact(timestampWithTimeZone, "yyyy/MM/dd HH:mm:ss.fff zzz", CultureInfo.InvariantCulture); - return new ExpectedTimestampWrapper(dateTime, columnType); + var dateTimes = + timestampsWithTimeZone + .Select(ts => DateTime.ParseExact(ts, "yyyy/MM/dd HH:mm:ss.fff zzz", CultureInfo.InvariantCulture)) + .ToArray(); + + return new ExpectedTimestampWrapper(dateTimes, columnType); } - private ExpectedTimestampWrapper(DateTime dateTime, SFDataType columnType) + private ExpectedTimestampWrapper(DateTime[] dateTimes, SFDataType columnType) { - _expectedDateTime = dateTime; - _expectedDateTimeOffset = null; + _expectedDateTimes = dateTimes; + _expectedDateTimeOffsets = null; _columnType = columnType; } - private ExpectedTimestampWrapper(DateTimeOffset dateTimeOffset, SFDataType columnType) + private ExpectedTimestampWrapper(DateTimeOffset[] dateTimeOffsets, SFDataType columnType) { - _expectedDateTimeOffset = dateTimeOffset; - _expectedDateTime = null; + _expectedDateTimeOffsets = dateTimeOffsets; + _expectedDateTimes = null; _columnType = columnType; } internal SFDataType ExpectedColumnType() => _columnType; - internal void AssertEqual(object actual, string comparisonFormat, string faultMessage) + internal void AssertEqual(object actual, string comparisonFormat, string faultMessage, int index) { switch (_columnType) { case SFDataType.TIMESTAMP_TZ: - Assert.AreEqual(GetDateTimeOffset().ToString(comparisonFormat), ((DateTimeOffset)actual).ToString(comparisonFormat), faultMessage); + Assert.AreEqual(GetDateTimeOffsets()[index].ToString(comparisonFormat), ((DateTimeOffset)actual).ToString(comparisonFormat), faultMessage); break; case SFDataType.TIMESTAMP_LTZ: - Assert.AreEqual(GetDateTimeOffset().ToUniversalTime().ToString(comparisonFormat), ((DateTimeOffset)actual).ToUniversalTime().ToString(comparisonFormat), faultMessage); + Assert.AreEqual(GetDateTimeOffsets()[index].ToUniversalTime().ToString(comparisonFormat), ((DateTimeOffset)actual).ToUniversalTime().ToString(comparisonFormat), faultMessage); break; default: - Assert.AreEqual(GetDateTime().ToString(comparisonFormat), ((DateTime)actual).ToString(comparisonFormat), faultMessage); + Assert.AreEqual(GetDateTimes()[index].ToString(comparisonFormat), ((DateTime)actual).ToString(comparisonFormat), faultMessage); break; } } - internal DateTime GetDateTime() => _expectedDateTime ?? throw new Exception($"Column {_columnType} is not matching the expected value type {typeof(DateTime)}"); + internal DateTime[] GetDateTimes() => _expectedDateTimes ?? throw new Exception($"Column {_columnType} is not matching the expected value type {typeof(DateTime)}"); - internal DateTimeOffset GetDateTimeOffset() => _expectedDateTimeOffset ?? 
throw new Exception($"Column {_columnType} is not matching the expected value type {typeof(DateTime)}"); + internal DateTimeOffset[] GetDateTimeOffsets() => _expectedDateTimeOffsets ?? throw new Exception($"Column {_columnType} is not matching the expected value type {typeof(DateTime)}"); internal static bool IsOffsetType(SFDataType type) => type == SFDataType.TIMESTAMP_LTZ || type == SFDataType.TIMESTAMP_TZ; } } + +#nullable restore diff --git a/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderGetEnumeratorIT.cs b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderGetEnumeratorIT.cs new file mode 100755 index 000000000..88e25256e --- /dev/null +++ b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderGetEnumeratorIT.cs @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. + */ + +using System; +using System.Linq; +using System.Data.Common; +using System.Data; +using System.Globalization; +using System.Text; +using NUnit.Framework; +using Snowflake.Data.Client; +using Snowflake.Data.Core; +using Snowflake.Data.Tests.Util; + +namespace Snowflake.Data.Tests.IntegrationTests +{ + [TestFixture(ResultFormat.ARROW)] + [TestFixture(ResultFormat.JSON)] + class SFDbDataReaderGetEnumeratorIT : SFBaseTest + { + protected override string TestName => base.TestName + _resultFormat; + + private readonly ResultFormat _resultFormat; + + public SFDbDataReaderGetEnumeratorIT(ResultFormat resultFormat) + { + _resultFormat = resultFormat; + } + + [Test] + public void TestGetEnumerator() + { + using (var conn = CreateAndOpenConnection()) + { + CreateAndPopulateTestTable(conn); + + string selectCommandText = $"select * from {TableName}"; + IDbCommand selectCmd = conn.CreateCommand(); + selectCmd.CommandText = selectCommandText; + DbDataReader reader = selectCmd.ExecuteReader() as DbDataReader; + + var enumerator = reader.GetEnumerator(); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual(3, (enumerator.Current as DbDataRecord).GetInt64(0)); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual(5, (enumerator.Current as DbDataRecord).GetInt64(0)); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual(8, (enumerator.Current as DbDataRecord).GetInt64(0)); + Assert.IsFalse(enumerator.MoveNext()); + + reader.Close(); + + DropTestTableAndCloseConnection(conn); + } + } + + [Test] + public void TestGetEnumeratorShouldBeEmptyWhenNotRowsReturned() + { + using (var conn = CreateAndOpenConnection()) + { + CreateAndPopulateTestTable(conn); + + string selectCommandText = $"select * from {TableName} WHERE cola > 10"; + IDbCommand selectCmd = conn.CreateCommand(); + selectCmd.CommandText = selectCommandText; + DbDataReader reader = selectCmd.ExecuteReader() as DbDataReader; + + var enumerator = reader.GetEnumerator(); + Assert.IsFalse(enumerator.MoveNext()); + Assert.IsNull(enumerator.Current); + + reader.Close(); + DropTestTableAndCloseConnection(conn); + } + } + + [Test] + public void TestGetEnumeratorWithCastMethod() + { + using (var conn = CreateAndOpenConnection()) + { + CreateAndPopulateTestTable(conn); + + string selectCommandText = $"select * from {TableName}"; + IDbCommand selectCmd = conn.CreateCommand(); + selectCmd.CommandText = selectCommandText; + DbDataReader reader = selectCmd.ExecuteReader() as DbDataReader; + + var dataRecords = reader.Cast().ToList(); + Assert.AreEqual(3, dataRecords.Count); + + reader.Close(); + + DropTestTableAndCloseConnection(conn); + } + } + + [Test] + public void TestGetEnumeratorForEachShouldNotEnterWhenResultsIsEmpty() + { + 
using (var conn = CreateAndOpenConnection()) + { + CreateAndPopulateTestTable(conn); + + string selectCommandText = $"select * from {TableName} WHERE cola > 10"; + IDbCommand selectCmd = conn.CreateCommand(); + selectCmd.CommandText = selectCommandText; + DbDataReader reader = selectCmd.ExecuteReader() as DbDataReader; + + foreach (var record in reader) + { + Assert.Fail("Should not enter when the result set is empty"); + } + + reader.Close(); + DropTestTableAndCloseConnection(conn); + } + } + + [Test] + public void TestGetEnumeratorShouldThrowNonSupportedExceptionWhenReset() + { + using (var conn = CreateAndOpenConnection()) + { + CreateAndPopulateTestTable(conn); + + string selectCommandText = $"select * from {TableName}"; + IDbCommand selectCmd = conn.CreateCommand(); + selectCmd.CommandText = selectCommandText; + DbDataReader reader = selectCmd.ExecuteReader() as DbDataReader; + + var enumerator = reader.GetEnumerator(); + Assert.IsTrue(enumerator.MoveNext()); + + Assert.Throws<NotSupportedException>(() => enumerator.Reset()); + + reader.Close(); + + DropTestTableAndCloseConnection(conn); + } + } + + private void DropTestTableAndCloseConnection(DbConnection conn) + { + IDbCommand cmd = conn.CreateCommand(); + cmd.CommandText = $"drop table if exists {TableName}"; + var count = cmd.ExecuteNonQuery(); + Assert.AreEqual(0, count); + + CloseConnection(conn); + } + + private void CreateAndPopulateTestTable(DbConnection conn) + { + CreateOrReplaceTable(conn, TableName, new []{"cola NUMBER"}); + + var cmd = conn.CreateCommand(); + + string insertCommand = $"insert into {TableName} values (3),(5),(8)"; + cmd.CommandText = insertCommand; + cmd.ExecuteNonQuery(); + } + + private DbConnection CreateAndOpenConnection() + { + var conn = new SnowflakeDbConnection(ConnectionString); + conn.Open(); + SessionParameterAlterer.SetResultFormat(conn, _resultFormat); + return conn; + } + + private void CloseConnection(DbConnection conn) + { + SessionParameterAlterer.RestoreResultFormat(conn); + conn.Close(); + } + } +} diff --git a/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs index b0e555185..c6952f84a 100755 --- a/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs @@ -20,14 +20,14 @@ namespace Snowflake.Data.Tests.IntegrationTests class SFDbDataReaderIT : SFBaseTest { protected override string TestName => base.TestName + _resultFormat; - + private readonly ResultFormat _resultFormat; - + public SFDbDataReaderIT(ResultFormat resultFormat) { _resultFormat = resultFormat; } - + private void ValidateResultFormat(IDataReader reader) { Assert.AreEqual(_resultFormat, ((SnowflakeDbDataReader)reader).ResultFormat); @@ -39,7 +39,7 @@ public void TestRecordsAffected() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola NUMBER"}); - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (1),(1),(1)"; @@ -67,7 +67,7 @@ public void TestGetNumber() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola NUMBER"}); - + IDbCommand cmd = conn.CreateCommand(); int numInt = 10000; @@ -114,7 +114,7 @@ public void TestGetNumber() Assert.IsFalse(reader.Read()); reader.Close(); - + CloseConnection(conn); } @@ -152,9 +152,9 @@ public void TestGetDouble() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + 
Assert.IsTrue(reader.Read()); Assert.AreEqual(numFloat, reader.GetFloat(0)); Assert.AreEqual((decimal)numFloat, reader.GetDecimal(0)); @@ -235,7 +235,7 @@ public void TestGetTime(string inputTimeStr, int? precision) [TestCase("11:22:33.4455667")] [TestCase("23:59:59.9999999")] [TestCase("16:20:00.6666666")] - [TestCase("00:00:00.0000000")] + [TestCase("00:00:00.0000000")] [TestCase("00:00:00")] [TestCase("23:59:59.1")] [TestCase("23:59:59.12")] @@ -284,7 +284,7 @@ public void TestGetTimeSpan(string inputTimeStr) Assert.AreEqual(dateTimeTime.Minute, timeSpanTime.Minutes); Assert.AreEqual(dateTimeTime.Second, timeSpanTime.Seconds); Assert.AreEqual(dateTimeTime.Millisecond, timeSpanTime.Milliseconds); - + CloseConnection(conn); } } @@ -336,7 +336,7 @@ public void TestGetTimeSpanError() IDataReader reader = cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // All types except TIME fail conversion when calling GetTimeSpan @@ -344,19 +344,19 @@ public void TestGetTimeSpanError() { try { - + ((SnowflakeDbDataReader)reader).GetTimeSpan(i); Assert.Fail("Data should not be converted to TIME"); } catch (SnowflakeDbException e) { - Assert.AreEqual(270003, e.ErrorCode); + Assert.AreEqual(270003, e.ErrorCode); } } // Null value // Null value can not be converted to TimeSpan because it is a non-nullable type - + try { ((SnowflakeDbDataReader)reader).GetTimeSpan(12); @@ -371,7 +371,7 @@ public void TestGetTimeSpanError() TimeSpan timeSpanTime = ((SnowflakeDbDataReader)reader).GetTimeSpan(13); reader.Close(); - + CloseConnection(conn); } } @@ -425,9 +425,9 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? precision, SFDataTyp cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // For time, we getDateTime on the column and ignore date part @@ -435,7 +435,7 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? precision, SFDataTyp if (dataType == SFDataType.DATE) { - Assert.AreEqual(inputTime.Date, reader.GetDateTime(0)); + Assert.AreEqual(inputTime.Date, actualTime); Assert.AreEqual(inputTime.Date.ToString("yyyy-MM-dd"), reader.GetString(0)); } if (dataType != SFDataType.DATE) @@ -449,14 +449,17 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? 
precision, SFDataTyp { if (precision == 9) { - Assert.AreEqual(inputTime, reader.GetDateTime(0)); + Assert.AreEqual(inputTime, actualTime); } else { - Assert.AreEqual(inputTime.Date, reader.GetDateTime(0).Date); + Assert.AreEqual(inputTime.Date, actualTime.Date); } } + // DATE, TIME and TIMESTAMP_NTZ should be returned with DateTimeKind.Unspecified + Assert.AreEqual(DateTimeKind.Unspecified, actualTime.Kind); + reader.Close(); CloseConnection(conn); @@ -495,9 +498,9 @@ public void TestGetTimestampTZ(int timezoneOffsetInHours) using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola TIMESTAMP_TZ"}); - + DateTimeOffset now = DateTimeOffset.Now.ToOffset(TimeSpan.FromHours(timezoneOffsetInHours)); - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (?)"; @@ -514,9 +517,9 @@ public void TestGetTimestampTZ(int timezoneOffsetInHours) cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); DateTimeOffset dtOffset = (DateTimeOffset)reader.GetValue(0); reader.Close(); @@ -535,9 +538,9 @@ public void TestGetTimestampLTZ() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola TIMESTAMP_LTZ"}); - + DateTimeOffset now = DateTimeOffset.Now; - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (?)"; @@ -555,9 +558,9 @@ public void TestGetTimestampLTZ() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); DateTimeOffset dtOffset = (DateTimeOffset)reader.GetValue(0); reader.Close(); @@ -592,9 +595,9 @@ public void TestGetBoolean([Values]bool value) cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); Assert.AreEqual(value, reader.GetBoolean(0)); reader.Close(); @@ -655,18 +658,18 @@ public void TestGetBinary() "col2 VARCHAR(50)", "col3 DOUBLE" }); - + byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARAY"); string testChars = "TEST_GET_CHARS"; double testDouble = 1.2345678; string insertCommand = $"insert into {TableName} values (?, '{testChars}',{testDouble.ToString()})"; IDbCommand cmd = conn.CreateCommand(); cmd.CommandText = insertCommand; - + var p1 = cmd.CreateParameter(); p1.ParameterName = "1"; p1.DbType = DbType.Binary; - p1.Value = testBytes; + p1.Value = testBytes; cmd.Parameters.Add(p1); var count = cmd.ExecuteNonQuery(); @@ -674,9 +677,9 @@ public void TestGetBinary() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion Assert.IsTrue(testBytes.SequenceEqual((byte[])reader.GetValue(0))); @@ -714,7 +717,7 @@ public void TestGetBinary() Assert.AreEqual(read, toReadLength); Assert.IsTrue(testSubBytes.SequenceEqual(sub)); - // Read subset 'GET_BINARAY' from actual 'TEST_GET_BINARAY' data + // Read subset 'GET_BINARAY' from actual 'TEST_GET_BINARAY' data // and copy inside existing buffer replacing Xs toReadLength = 11; byte[] testSubBytesWithTargetOffset = Encoding.UTF8.GetBytes("OFFSET GET_BINARAY EXTRA"); @@ -731,7 +734,7 @@ public void TestGetBinary() //** Invalid data offsets **/ try { - // Data offset > data length + // Data offset > data length reader.GetBytes(0, 25, sub, 7, 
toReadLength); Assert.Fail(); } @@ -754,7 +757,7 @@ public void TestGetBinary() //** Invalid buffer offsets **// try { - // Buffer offset > buffer length + // Buffer offset > buffer length reader.GetBytes(0, 6, sub, 25, toReadLength); Assert.Fail(); } @@ -775,7 +778,7 @@ public void TestGetBinary() } //** Null buffer **// - // If null, this method returns the size required of the array in order to fit all + // If null, this method returns the size required of the array in order to fit all // of the specified data. read = reader.GetBytes(0, 6, null, 0, toReadLength); Assert.AreEqual(testBytes.Length, read); @@ -828,7 +831,7 @@ public void TestGetChars() "col2 BINARY", "col3 DOUBLE" }); - + string testChars = "TEST_GET_CHARS"; byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARY"); double testDouble = 1.2345678; @@ -849,7 +852,7 @@ public void TestGetChars() IDataReader reader = cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion Assert.IsTrue(testChars.Equals(reader.GetValue(0))); @@ -889,7 +892,7 @@ public void TestGetChars() Assert.IsTrue(testSubChars.SequenceEqual(sub)); - // Read subset 'GET_CHARS' from actual 'TEST_GET_CHARS' data + // Read subset 'GET_CHARS' from actual 'TEST_GET_CHARS' data // and copy inside existing buffer replacing Xs char[] testSubCharsWithTargetOffset = "OFFSET GET_CHARS EXTRA".ToArray(); toReadLength = 9; @@ -906,7 +909,7 @@ public void TestGetChars() //** Invalid data offsets **// try { - // Data offset > data length + // Data offset > data length reader.GetChars(0, 25, sub, 7, toReadLength); Assert.Fail(); } @@ -929,7 +932,7 @@ public void TestGetChars() //** Invalid buffer offsets **// try { - // Buffer offset > buffer length + // Buffer offset > buffer length reader.GetChars(0, 6, sub, 25, toReadLength); Assert.Fail(); } @@ -950,7 +953,7 @@ public void TestGetChars() } //** Null buffer **// - // If null, this method returns the size required of the array in order to fit all + // If null, this method returns the size required of the array in order to fit all // of the specified data. 
read = reader.GetChars(0, 6, null, 0, toReadLength); Assert.AreEqual(testChars.Length, read); @@ -1016,7 +1019,7 @@ public void TestGetStream() "col2 BINARY", "col3 DOUBLE" }); - + string testChars = "TEST_GET_CHARS"; byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARY"); double testDouble = 1.2345678; @@ -1037,7 +1040,7 @@ public void TestGetStream() DbDataReader reader = (DbDataReader) cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion @@ -1087,9 +1090,9 @@ public void TestGetValueIndexOutOfBound() IDbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); try @@ -1128,7 +1131,7 @@ public void TestBasicDataReader() using (IDataReader reader = cmd.ExecuteReader()) { ValidateResultFormat(reader); - + Assert.AreEqual(2, reader.FieldCount); Assert.AreEqual(0, reader.Depth); Assert.IsTrue(((SnowflakeDbDataReader)reader).HasRows); @@ -1151,7 +1154,7 @@ public void TestBasicDataReader() reader.Close(); Assert.IsTrue(reader.IsClosed); - + try { reader.Read(); @@ -1199,7 +1202,7 @@ public void TestReadOutNullVal() using (IDataReader reader = cmd.ExecuteReader()) { ValidateResultFormat(reader); - + reader.Read(); object nullVal = reader.GetValue(0); Assert.AreEqual(DBNull.Value, nullVal); @@ -1211,7 +1214,7 @@ public void TestReadOutNullVal() } CloseConnection(conn); - } + } } [Test] @@ -1238,9 +1241,9 @@ public void TestGetGuid() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); Assert.AreEqual(val, reader.GetGuid(0)); @@ -1302,7 +1305,7 @@ public void TestCopyCmdResultSet() cmd.CommandText = $"create or replace stage {stageName}"; cmd.ExecuteNonQuery(); - cmd.CommandText = $"copy into {TableName} from @{stageName}"; + cmd.CommandText = $"copy into {TableName} from @{stageName}"; using (var rdr = cmd.ExecuteReader()) { // Can read the first row @@ -1433,7 +1436,7 @@ public void TestResultSetMetadata() CloseConnection(conn); } } - + [Test] public void TestHasRows() { @@ -1441,9 +1444,9 @@ public void TestHasRows() { DbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1 where 1=2"; - + DbDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); Assert.IsFalse(reader.HasRows); @@ -1451,7 +1454,7 @@ public void TestHasRows() CloseConnection(conn); } } - + [Test] public void TestHasRowsMultiStatement() { @@ -1460,15 +1463,15 @@ public void TestHasRowsMultiStatement() DbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1;" + "select 1 where 1=2;" + - "select 1;" + + "select 1;" + "select 1 where 1=2;"; - + DbParameter param = cmd.CreateParameter(); param.ParameterName = "MULTI_STATEMENT_COUNT"; param.DbType = DbType.Int16; param.Value = 4; cmd.Parameters.Add(param); - + DbDataReader reader = cmd.ExecuteReader(); // multi statements are always returned in JSON @@ -1483,7 +1486,7 @@ public void TestHasRowsMultiStatement() // select 1 where 1=2 Assert.IsFalse(reader.HasRows); reader.NextResult(); - + // select 1 Assert.IsTrue(reader.HasRows); reader.Read(); @@ -1494,12 +1497,12 @@ public void TestHasRowsMultiStatement() Assert.IsFalse(reader.HasRows); reader.NextResult(); Assert.IsFalse(reader.HasRows); - + reader.Close(); CloseConnection(conn); } } - + [Test] [TestCase("99")] // Int8 [TestCase("9.9")] // Int8 + scale @@ -1564,23 +1567,23 @@ public void TestTimestampTz(string 
testValue, int scale) { using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_TZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTimeOffset.Parse(testValue); Assert.AreEqual(expectedValue, reader.GetValue(0)); } - + CloseConnection(conn); } } - + [Test] [TestCase("2019-01-01 12:12:12.1234567 +0500", 7)] [TestCase("2019-01-01 12:12:12.1234567 +1400", 7)] @@ -1591,23 +1594,23 @@ public void TestTimestampLtz(string testValue, int scale) using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_LTZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTimeOffset.Parse(testValue).ToLocalTime(); Assert.AreEqual(expectedValue, reader.GetValue(0)); } - + CloseConnection(conn); } } - + [Test] [TestCase("2019-01-01 12:12:12.1234567", 7)] [TestCase("0001-01-01 00:00:00.0000000", 9)] @@ -1617,19 +1620,19 @@ public void TestTimestampNtz(string testValue, int scale) using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_NTZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTime.Parse(testValue); Assert.AreEqual(expectedValue, reader.GetValue(0)); } - + CloseConnection(conn); } } diff --git a/Snowflake.Data.Tests/IntegrationTests/StructuredArraysIT.cs b/Snowflake.Data.Tests/IntegrationTests/StructuredArraysIT.cs index afc31ea54..aee5e666e 100644 --- a/Snowflake.Data.Tests/IntegrationTests/StructuredArraysIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/StructuredArraysIT.cs @@ -243,6 +243,31 @@ public void TestSelectArrayOfDoubles() } } + [Test] + public void TestSelectArrayOfDoublesWithExponentNotation() + { + // arrange + using (var connection = new SnowflakeDbConnection(ConnectionString)) + { + connection.Open(); + using (var command = connection.CreateCommand()) + { + EnableStructuredTypes(connection); + var arrayOfDoubles = "ARRAY_CONSTRUCT(1.0e100, 1.0e-100)::ARRAY(DOUBLE)"; + command.CommandText = $"SELECT {arrayOfDoubles}"; + var reader = (SnowflakeDbDataReader) command.ExecuteReader(); + Assert.IsTrue(reader.Read()); + + // act + var array = reader.GetArray<double>(0); + + // assert + Assert.AreEqual(2, array.Length); + CollectionAssert.AreEqual(new[] { 1.0e100d, 1.0e-100d }, array); + } + } + } + [Test] public void TestSelectStringArrayWithNulls() { diff --git a/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs b/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs index 6f88126d9..22f8310a1 100644 --- a/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs @@ -312,13 +312,90 @@ public void TestSelectDateTime(string dbValue, string dbType, DateTime? 
expected internal static IEnumerable DateTimeConversionCases() { - yield return new object[] { "2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime() }; - yield return new object[] { "2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTime.Parse("2024-07-11 09:20:05").ToUniversalTime() }; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTime.Parse("2024-07-11 21:20:05").ToUniversalTime() }; - yield return new object[] { "2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11").ToUniversalTime(), DateTime.Parse("2024-07-11").ToUniversalTime() }; - yield return new object[] { "2024-07-11 14:20:05.123456789", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.1234567").ToUniversalTime(), DateTime.Parse("2024-07-11 14:20:05.1234568").ToUniversalTime()}; - yield return new object[] { "2024-07-11 14:20:05.123456789 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTime.Parse("2024-07-11 09:20:05.1234568").ToUniversalTime() }; - yield return new object[] {"2024-07-11 14:20:05.123456789 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTime.Parse("2024-07-11 21:20:05.1234568").ToUniversalTime() }; + yield return new object[] + { + "2024-07-11 14:20:05", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05"), + DateTime.Parse("2024-07-11 14:20:05") // kind -> Unspecified + }; + yield return new object[] + { + "2024-07-11 14:20:05 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05"), DateTimeKind.Utc) + }; + yield return new object[] + { + "2024-07-11 14:20:05 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTime.Parse("2024-07-11 21:20:05").ToLocalTime() + }; + yield return new object[] + { + "2024-07-11", + SFDataType.DATE.ToString(), + DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified), + DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified) + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05.1234567"), + DateTime.Parse("2024-07-11 14:20:05.1234568") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05.1234568"), DateTimeKind.Utc) + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTime.Parse("2024-07-11 21:20:05.1234568").ToLocalTime() + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("9999-12-31 23:59:59.999999"), + DateTime.Parse("9999-12-31 23:59:59.999999") + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999 +1:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("9999-12-31 22:59:59.999999"), DateTimeKind.Utc) + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999 +13:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTime.Parse("9999-12-31 10:59:59.999999").ToLocalTime() + }; + yield return new object[] + { + "0001-01-01 00:00:00", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("0001-01-01 00:00:00"), + DateTime.Parse("0001-01-01 00:00:00") + }; + yield 
return new object[] + { + "0001-01-01 00:00:00 -1:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("0001-01-01 01:00:00"), DateTimeKind.Utc) + }; } [Test] @@ -354,13 +431,90 @@ public void TestSelectDateTimeOffset(string dbValue, string dbType, DateTime? ex internal static IEnumerable DateTimeOffsetConversionCases() { - yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11 14:20:05Z")}; - yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00")}; - yield return new object[] {"2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11Z")}; - yield return new object[] {"2024-07-11 14:20:05.123456789", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.1234567").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11 14:20:05.1234568Z")}; - yield return new object[] {"2024-07-11 14:20:05.123456789 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05.123456789 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 -7:00")}; + yield return new object[] + { + "2024-07-11 14:20:05", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05"), + DateTimeOffset.Parse("2024-07-11 14:20:05Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00") + }; + yield return new object[] + { + "2024-07-11 14:20:05 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00").ToLocalTime() + }; + yield return new object[] + { + "2024-07-11", + SFDataType.DATE.ToString(), + DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified), + DateTimeOffset.Parse("2024-07-11Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05.1234567"), + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 +5:00") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 -7:00") + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("9999-12-31 23:59:59.999999"), + DateTimeOffset.Parse("9999-12-31 23:59:59.999999Z") + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999 +1:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("9999-12-31 23:59:59.999999 +1:00") + }; + yield return new object[] + { + "9999-12-31 23:59:59.999999 +13:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTimeOffset.Parse("9999-12-31 23:59:59.999999 +13:00") + }; + yield return new object[] + { + "0001-01-01 00:00:00", + 
SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("0001-01-01 00:00:00"), + DateTimeOffset.Parse("0001-01-01 00:00:00Z") + }; + yield return new object[] + { + "0001-01-01 00:00:00 -1:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("0001-01-01 00:00:00 -1:00") + }; } private TimeZoneInfo GetTimeZone(SnowflakeDbConnection connection) diff --git a/Snowflake.Data.Tests/IntegrationTests/VectorTypesIT.cs b/Snowflake.Data.Tests/IntegrationTests/VectorTypesIT.cs new file mode 100644 index 000000000..07a24a91b --- /dev/null +++ b/Snowflake.Data.Tests/IntegrationTests/VectorTypesIT.cs @@ -0,0 +1,356 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ + +using NUnit.Framework; +using Snowflake.Data.Client; +using System.Data.Common; +using Snowflake.Data.Core; +using System; + +namespace Snowflake.Data.Tests.IntegrationTests +{ + [TestFixture(ResultFormat.ARROW)] + [TestFixture(ResultFormat.JSON)] + class VectorTypesIT : SFBaseTest + { + private readonly ResultFormat _resultFormat; + + public VectorTypesIT(ResultFormat resultFormat) + { + _resultFormat = resultFormat; + } + + [Test] + public void TestSelectIntVectorFromTable() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = $"CREATE OR REPLACE TABLE {TableName} (a VECTOR(INT, 3));"; + command.ExecuteNonQuery(); + command.CommandText = $"INSERT INTO {TableName} SELECT [1,2,3]::VECTOR(INT,3);"; + command.ExecuteNonQuery(); + command.CommandText = $"INSERT INTO {TableName} SELECT [4,5,6]::VECTOR(INT,3);"; + command.ExecuteNonQuery(); + command.CommandText = $"INSERT INTO {TableName} SELECT [7,8,9]::VECTOR(INT,3);"; + command.ExecuteNonQuery(); + + command.CommandText = $"SELECT COUNT(*) FROM {TableName};"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + Assert.IsTrue(reader.Read()); + Assert.AreEqual(3, reader.GetInt16(0)); + + command.CommandText = $"SELECT * FROM {TableName};"; + reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1,2,3]", reader.GetString(0)); + var arr = reader.GetArray<int>(0); + Assert.AreEqual(1, arr[0]); + Assert.AreEqual(2, arr[1]); + Assert.AreEqual(3, arr[2]); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[4,5,6]", reader.GetString(0)); + arr = reader.GetArray<int>(0); + Assert.AreEqual(4, arr[0]); + Assert.AreEqual(5, arr[1]); + Assert.AreEqual(6, arr[2]); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[7,8,9]", reader.GetString(0)); + arr = reader.GetArray<int>(0); + Assert.AreEqual(7, arr[0]); + Assert.AreEqual(8, arr[1]); + Assert.AreEqual(9, arr[2]); + + command.CommandText = $"DROP TABLE IF EXISTS {TableName};"; + command.ExecuteNonQuery(); + } + } + } + + [Test] + public void TestSelectFloatVectorFromTable() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = $"CREATE OR REPLACE TABLE {TableName} (a VECTOR(FLOAT, 3));"; + command.ExecuteNonQuery(); + command.CommandText = $"INSERT INTO {TableName} SELECT [1.1,2.2,3.3]::VECTOR(FLOAT,3);"; + command.ExecuteNonQuery(); + command.CommandText = $"INSERT INTO {TableName} SELECT [4.4,5.5,6.6]::VECTOR(FLOAT,3);"; + command.ExecuteNonQuery(); + command.CommandText = 
$"INSERT INTO {TableName} SELECT [7.7,8.8,9.9]::VECTOR(FLOAT,3);"; + command.ExecuteNonQuery(); + + command.CommandText = $"SELECT COUNT(*) FROM {TableName};"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + Assert.IsTrue(reader.Read()); + Assert.AreEqual(3, reader.GetInt16(0)); + + command.CommandText = $"SELECT * FROM {TableName};"; + reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1.100000,2.200000,3.300000]", reader.GetString(0)); + var arr = reader.GetArray(0); + Assert.AreEqual(1.1f, arr[0]); + Assert.AreEqual(2.2f, arr[1]); + Assert.AreEqual(3.3f, arr[2]); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[4.400000,5.500000,6.600000]", reader.GetString(0)); + arr = reader.GetArray(0); + Assert.AreEqual(4.4f, arr[0]); + Assert.AreEqual(5.5f, arr[1]); + Assert.AreEqual(6.6f, arr[2]); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[7.700000,8.800000,9.900000]", reader.GetString(0)); + arr = reader.GetArray(0); + Assert.AreEqual(7.7f, arr[0]); + Assert.AreEqual(8.8f, arr[1]); + Assert.AreEqual(9.9f, arr[2]); + + command.CommandText = $"DROP TABLE IF EXISTS {TableName};"; + command.ExecuteNonQuery(); + } + } + } + + [Test] + public void TestSelectIntVector() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [1, 2, 3]::VECTOR(INT, 3) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1,2,3]", reader.GetString(0)); + + var arr = reader.GetArray(0); + Assert.AreEqual(1, arr[0]); + Assert.AreEqual(2, arr[1]); + Assert.AreEqual(3, arr[2]); + } + } + } + + [Test] + public void TestSelectIntVectorWithMinAndMax32BitValues() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = $"SELECT [{Int32.MinValue}, {Int32.MaxValue}]::VECTOR(INT, 2) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual($"[{Int32.MinValue},{Int32.MaxValue}]", reader.GetString(0)); + + var arr = reader.GetArray(0); + Assert.AreEqual(Int32.MinValue, arr[0]); + Assert.AreEqual(Int32.MaxValue, arr[1]); + } + } + } + + [Test] + public void TestThrowExceptionForInvalidValueForIntVector() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [1.1]::VECTOR(INT, 3) as vec;"; + + var thrown = Assert.Throws(() => command.ExecuteReader()); + + Assert.That(thrown.Message, Does.Contain("Array-like value being cast to a vector has incorrect dimension")); + } + } + } + + [Test] + public void TestThrowExceptionForInvalidIdentifierForIntVector() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [A, B, C]::VECTOR(INT, 3) as vec;"; + + var thrown = Assert.Throws(() => command.ExecuteReader()); + + Assert.That(thrown.Message, Does.Contain("invalid identifier")); + } + 
} + } + + [Test] + public void TestSelectFloatVector() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [1.1,2.22,3.333]::VECTOR(FLOAT, 3) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1.100000,2.220000,3.333000]", reader.GetString(0)); + + var arr = reader.GetArray<float>(0); + Assert.AreEqual(1.1f, arr[0]); + Assert.AreEqual(2.22f, arr[1]); + Assert.AreEqual(3.333f, arr[2]); + } + } + } + + [Test] + public void TestSelectFloatVectorWithMinAndMaxFloatValues() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = $"SELECT [{float.MinValue}, {float.MaxValue}]::VECTOR(FLOAT, 2) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + + var arr = reader.GetArray<float>(0); +#if NETFRAMEWORK + Assert.AreEqual(float.MinValue.ToString(), arr[0].ToString()); + Assert.AreEqual(float.MaxValue.ToString(), arr[1].ToString()); +#else + Assert.AreEqual(float.MinValue, arr[0]); + Assert.AreEqual(float.MaxValue, arr[1]); +#endif + } + } + } + + [Test] + public void TestSelectFloatVectorWithNoDecimals() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [1,2,3]::VECTOR(FLOAT, 3) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1.000000,2.000000,3.000000]", reader.GetString(0)); + + var arr = reader.GetArray<float>(0); + Assert.AreEqual(1f, arr[0]); + Assert.AreEqual(2f, arr[1]); + Assert.AreEqual(3f, arr[2]); + } + } + } + + [Test] + public void TestSelectFloatVectorWithGreaterThanSixDigitPrecision() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [1.123456789,2.123456789,3.123456789]::VECTOR(FLOAT, 3) as vec;"; + var reader = (SnowflakeDbDataReader)command.ExecuteReader(); + + Assert.IsTrue(reader.Read()); + Assert.AreEqual("[1.123457,2.123457,3.123457]", reader.GetString(0)); + + var arr = reader.GetArray<float>(0); + Assert.AreEqual(1.123457f, arr[0]); + Assert.AreEqual(2.123457f, arr[1]); + Assert.AreEqual(3.123457f, arr[2]); + } + } + } + + [Test] + public void TestThrowExceptionForInvalidIdentifierForFloatVector() + { + using (DbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString; + conn.Open(); + AlterSessionSettings(conn); + + using (DbCommand command = conn.CreateCommand()) + { + command.CommandText = "SELECT [A, B, C]::VECTOR(FLOAT, 3) as vec;"; + + var thrown = Assert.Throws<SnowflakeDbException>(() => command.ExecuteReader()); + + Assert.That(thrown.Message, Does.Contain("invalid identifier")); + } + } + } + + private void AlterSessionSettings(DbConnection conn) + { + using (var command = conn.CreateCommand()) + { + command.CommandText = $"ALTER SESSION SET DOTNET_QUERY_RESULT_FORMAT = {_resultFormat}"; + command.ExecuteNonQuery(); + } + } + } 
+} diff --git a/Snowflake.Data.Tests/Mock/MockGCSClient.cs b/Snowflake.Data.Tests/Mock/MockGCSClient.cs index cb36918ae..a25d4279a 100644 --- a/Snowflake.Data.Tests/Mock/MockGCSClient.cs +++ b/Snowflake.Data.Tests/Mock/MockGCSClient.cs @@ -25,7 +25,7 @@ class MockGCSClient internal const string GcsFileContent = "GCSClientTest"; // Create a mock response for GetFileHeader - static internal HttpWebResponse CreateResponseForFileHeader(HttpStatusCode httpStatusCode) + internal static HttpWebResponse CreateResponseForFileHeader(HttpStatusCode httpStatusCode) { var response = new Mock<HttpWebResponse>(); @@ -46,14 +46,18 @@ static internal HttpWebResponse CreateResponseForFileHeader(HttpStatusCode httpS } // Create a mock response for UploadFile - static internal HttpWebResponse CreateResponseForUploadFile(HttpStatusCode httpStatusCode) + internal static HttpWebResponse CreateResponseForUploadFile(HttpStatusCode? httpStatusCode) { var response = new Mock<HttpWebResponse>(); - if (httpStatusCode != HttpStatusCode.OK) + if (httpStatusCode is null) + { + throw new WebException("Mock GCS Error - no response", null, 0, null); + } + else if (httpStatusCode != HttpStatusCode.OK) { response.SetupGet(c => c.StatusCode) - .Returns(httpStatusCode); + .Returns(httpStatusCode.Value); throw new WebException("Mock GCS Error", null, 0, response.Object); } @@ -61,11 +65,15 @@ static internal HttpWebResponse CreateResponseForUploadFile(HttpStatusCode httpS } // Create a mock response for DownloadFile - static internal HttpWebResponse CreateResponseForDownloadFile(HttpStatusCode httpStatusCode) + internal static HttpWebResponse CreateResponseForDownloadFile(HttpStatusCode? httpStatusCode) { var response = new Mock<HttpWebResponse>(); - if (httpStatusCode == HttpStatusCode.OK) + if (httpStatusCode is null) + { + throw new WebException("Mock GCS Error - no response", null, 0, null); + } + else if (httpStatusCode == HttpStatusCode.OK) { response.Setup(c => c.Headers).Returns(new WebHeaderCollection()); response.Object.Headers.Add(SFGCSClient.GCS_METADATA_ENCRYPTIONDATAPROP, @@ -82,7 +90,7 @@ static internal HttpWebResponse CreateResponseForDownloadFile(HttpStatusCode htt else { response.SetupGet(c => c.StatusCode) - .Returns(httpStatusCode); + .Returns(httpStatusCode.Value); throw new WebException("Mock GCS Error", null, 0, response.Object); } diff --git a/Snowflake.Data.Tests/UnitTests/ArrowResultSetTest.cs b/Snowflake.Data.Tests/UnitTests/ArrowResultSetTest.cs index 026671783..0405c7009 100755 --- a/Snowflake.Data.Tests/UnitTests/ArrowResultSetTest.cs +++ b/Snowflake.Data.Tests/UnitTests/ArrowResultSetTest.cs @@ -12,6 +12,7 @@ using Apache.Arrow; using Apache.Arrow.Ipc; using NUnit.Framework; +using Snowflake.Data.Client; using Snowflake.Data.Core; using Snowflake.Data.Tests.Util; @@ -459,6 +460,45 @@ public void TestGetTimestampNtz() } } + [Test] + public void TestThrowsExceptionForResultSetWithUnknownSFDataType() + { + const string UnknownDataType = "FAKE_TYPE"; + QueryExecResponseData responseData = new QueryExecResponseData() + { + rowType = new List<ExecResponseRowType>() + { + new ExecResponseRowType + { + name = "name", + type = UnknownDataType + } + } + }; + + var exception = Assert.Throws<SnowflakeDbException>(() => new ArrowResultSet(responseData, PrepareStatement(), new CancellationToken())); + Assert.IsTrue(exception.Message.Contains($"Unknown column type: {UnknownDataType}")); + } + + [Test] + public void TestThrowsExceptionForResultSetWithUnknownNativeType() + { + QueryExecResponseData responseData = new QueryExecResponseData() + { + rowType = new List<ExecResponseRowType>() + { + new ExecResponseRowType + { + 
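+                        // SFDataType.None has no native type mapping, so constructing the result set should be rejected +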
name = "name", + type = SFDataType.None.ToString() + } + } + }; + + var exception = Assert.Throws(() => new ArrowResultSet(responseData, PrepareStatement(), new CancellationToken())); + Assert.IsTrue(exception.Message.Contains($"Unknown column type: {SFDataType.None.ToString()}")); + } + private void PrepareTestCase(SFDataType sfType, long scale, object values) { _recordBatch = ArrowResultChunkTest.PrepareRecordBatch(sfType, scale, values); diff --git a/Snowflake.Data.Tests/UnitTests/SFBindUploaderTest.cs b/Snowflake.Data.Tests/UnitTests/SFBindUploaderTest.cs index ac5172086..46e5b5b90 100644 --- a/Snowflake.Data.Tests/UnitTests/SFBindUploaderTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFBindUploaderTest.cs @@ -20,7 +20,7 @@ public void TestCsvDataConversionForDate(SFDataType dbType, string input, string { // Arrange var dateExpected = DateTime.Parse(expected); - var check = SFDataConverter.csharpValToSfVal(SFDataType.DATE, dateExpected); + var check = SFDataConverter.CSharpValToSfVal(SFDataType.DATE, dateExpected); Assert.AreEqual(check, input); // Act DateTime dateActual = DateTime.Parse(_bindUploader.GetCSVData(dbType.ToString(), input)); @@ -37,51 +37,60 @@ public void TestCsvDataConversionForTime(SFDataType dbType, string input, string { // Arrange DateTime timeExpected = DateTime.Parse(expected); - var check = SFDataConverter.csharpValToSfVal(SFDataType.TIME, timeExpected); + var check = SFDataConverter.CSharpValToSfVal(SFDataType.TIME, timeExpected); Assert.AreEqual(check, input); // Act DateTime timeActual = DateTime.Parse(_bindUploader.GetCSVData(dbType.ToString(), input)); // Assert Assert.AreEqual(timeExpected, timeActual); } - - [TestCase(SFDataType.TIMESTAMP_LTZ, "39600000000000", "1970-01-01T12:00:00.0000000+01:00")] + + [TestCase(SFDataType.TIMESTAMP_LTZ, "0", "1970-01-01T00:00:00.0000000+00:00")] + [TestCase(SFDataType.TIMESTAMP_LTZ, "39600000000000", "1970-01-01T12:00:00.0000000+01:00")] [TestCase(SFDataType.TIMESTAMP_LTZ, "1341136800000000000", "2012-07-01T12:00:00.0000000+02:00")] [TestCase(SFDataType.TIMESTAMP_LTZ, "352245599987654000", "1981-02-28T23:59:59.9876540+02:00")] [TestCase(SFDataType.TIMESTAMP_LTZ, "1678868249207000000", "2023/03/15T13:17:29.207+05:00")] + [TestCase(SFDataType.TIMESTAMP_LTZ, "253402300799999999900", "9999-12-31T23:59:59.9999999+00:00")] + [TestCase(SFDataType.TIMESTAMP_LTZ, "-62135596800000000000", "0001-01-01T00:00:00.0000000+00:00")] public void TestCsvDataConversionForTimestampLtz(SFDataType dbType, string input, string expected) { // Arrange var timestampExpected = DateTimeOffset.Parse(expected); - var check = SFDataConverter.csharpValToSfVal(SFDataType.TIMESTAMP_LTZ, timestampExpected); + var check = SFDataConverter.CSharpValToSfVal(SFDataType.TIMESTAMP_LTZ, timestampExpected); Assert.AreEqual(check, input); // Act var timestampActual = DateTimeOffset.Parse(_bindUploader.GetCSVData(dbType.ToString(), input)); // Assert Assert.AreEqual(timestampExpected.ToLocalTime(), timestampActual); } - + + [TestCase(SFDataType.TIMESTAMP_TZ, "0 1440", "1970-01-01 00:00:00.000000 +00:00")] [TestCase(SFDataType.TIMESTAMP_TZ, "1341136800000000000 1560", "2012-07-01 12:00:00.000000 +02:00")] [TestCase(SFDataType.TIMESTAMP_TZ, "352245599987654000 1560", "1981-02-28 23:59:59.987654 +02:00")] + [TestCase(SFDataType.TIMESTAMP_TZ, "253402300799999999000 1440", "9999-12-31 23:59:59.999999 +00:00")] + [TestCase(SFDataType.TIMESTAMP_TZ, "-62135596800000000000 1440", "0001-01-01 00:00:00.000000 +00:00")] public void TestCsvDataConversionForTimestampTz(SFDataType 
dbType, string input, string expected) { // Arrange DateTimeOffset timestampExpected = DateTimeOffset.Parse(expected); - var check = SFDataConverter.csharpValToSfVal(SFDataType.TIMESTAMP_TZ, timestampExpected); + var check = SFDataConverter.CSharpValToSfVal(SFDataType.TIMESTAMP_TZ, timestampExpected); Assert.AreEqual(check, input); // Act DateTimeOffset timestampActual = DateTimeOffset.Parse(_bindUploader.GetCSVData(dbType.ToString(), input)); // Assert Assert.AreEqual(timestampExpected, timestampActual); } - + + [TestCase(SFDataType.TIMESTAMP_NTZ, "0", "1970-01-01 00:00:00.000000")] [TestCase(SFDataType.TIMESTAMP_NTZ, "1341144000000000000", "2012-07-01 12:00:00.000000")] [TestCase(SFDataType.TIMESTAMP_NTZ, "352252799987654000", "1981-02-28 23:59:59.987654")] + [TestCase(SFDataType.TIMESTAMP_NTZ, "253402300799999999000", "9999-12-31 23:59:59.999999")] + [TestCase(SFDataType.TIMESTAMP_NTZ, "-62135596800000000000", "0001-01-01 00:00:00.000000")] public void TestCsvDataConversionForTimestampNtz(SFDataType dbType, string input, string expected) { - // Arrange + // Arrange DateTime timestampExpected = DateTime.Parse(expected); - var check = SFDataConverter.csharpValToSfVal(SFDataType.TIMESTAMP_NTZ, timestampExpected); + var check = SFDataConverter.CSharpValToSfVal(SFDataType.TIMESTAMP_NTZ, timestampExpected); Assert.AreEqual(check, input); // Act DateTime timestampActual = DateTime.Parse(_bindUploader.GetCSVData(dbType.ToString(), input)); diff --git a/Snowflake.Data.Tests/UnitTests/SFDataConverterTest.cs b/Snowflake.Data.Tests/UnitTests/SFDataConverterTest.cs index 65160ac97..7def7ce6a 100755 --- a/Snowflake.Data.Tests/UnitTests/SFDataConverterTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFDataConverterTest.cs @@ -4,6 +4,8 @@ using System; using System.Text; +using Snowflake.Data.Client; +using Snowflake.Data.Tests.Util; namespace Snowflake.Data.Tests.UnitTests { @@ -36,8 +38,8 @@ public void TestConvertBindToSFValFinlandLocale() Thread.CurrentThread.CurrentCulture = ci; - System.Tuple t = - SFDataConverter.csharpTypeValToSfTypeVal(System.Data.DbType.Double, 1.2345); + System.Tuple t = + SFDataConverter.CSharpTypeValToSfTypeVal(System.Data.DbType.Double, 1.2345); Assert.AreEqual("REAL", t.Item1); Assert.AreEqual("1.2345", t.Item2); @@ -109,7 +111,7 @@ public void TestConvertTimeSpan(string inputTimeStr) var tickDiff = val.Ticks; var inputStringAsItComesBackFromDatabase = (tickDiff / 10000000.0m).ToString(CultureInfo.InvariantCulture); inputStringAsItComesBackFromDatabase += inputTimeStr.Substring(8, inputTimeStr.Length - 8); - + // Run the conversion var result = SFDataConverter.ConvertToCSharpVal(ConvertToUTF8Buffer(inputStringAsItComesBackFromDatabase), SFDataType.TIME, typeof(TimeSpan)); @@ -148,7 +150,7 @@ public void TestConvertDate(string inputTimeStr, object kind = null) private void internalTestConvertDate(DateTime dtExpected, DateTime testValue) { - var result = SFDataConverter.csharpTypeValToSfTypeVal(System.Data.DbType.Date, testValue); + var result = SFDataConverter.CSharpTypeValToSfTypeVal(System.Data.DbType.Date, testValue); // Convert result to DateTime for easier interpretation var unixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc); DateTime dtResult = unixEpoch.AddMilliseconds(Int64.Parse(result.Item2)); @@ -326,5 +328,25 @@ public void TestInvalidConversionInvalidDecimal(string s) Assert.Throws(() => SFDataConverter.ConvertToCSharpVal(ConvertToUTF8Buffer(s), SFDataType.FIXED, typeof(decimal))); } + [Test] + [TestCase(SFDataType.TIMESTAMP_LTZ, typeof(DateTime))] + 
[TestCase(SFDataType.TIMESTAMP_TZ, typeof(DateTime))] + [TestCase(SFDataType.TIMESTAMP_NTZ, typeof(DateTimeOffset))] + [TestCase(SFDataType.TIME, typeof(DateTimeOffset))] + [TestCase(SFDataType.DATE, typeof(DateTimeOffset))] + public void TestInvalidTimestampConversion(SFDataType dataType, Type unsupportedType) + { + object unsupportedObject; + if (unsupportedType == typeof(DateTimeOffset)) + unsupportedObject = new DateTimeOffset(); + else if (unsupportedType == typeof(DateTime)) + unsupportedObject = new DateTime(); + else + unsupportedObject = null; + + Assert.NotNull(unsupportedObject); + SnowflakeDbException ex = Assert.Throws(() => SFDataConverter.CSharpValToSfVal(dataType, unsupportedObject)); + SnowflakeDbExceptionAssert.HasErrorCode(ex, SFError.INVALID_DATA_CONVERSION); + } } } diff --git a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs index 925ce4c98..0fad57542 100644 --- a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs @@ -223,16 +223,14 @@ private void AssertForGetFileHeaderTests(ResultStatus expectedResultStatus, File [TestCase(HttpStatusCode.Forbidden, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.InternalServerError, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.ServiceUnavailable, ResultStatus.NEED_RETRY)] - public void TestUploadFile(HttpStatusCode httpStatusCode, ResultStatus expectedResultStatus) + [TestCase(null, ResultStatus.ERROR)] + public void TestUploadFile(HttpStatusCode? httpStatusCode, ResultStatus expectedResultStatus) { // Arrange var mockWebRequest = new Mock(); mockWebRequest.Setup(c => c.Headers).Returns(new WebHeaderCollection()); mockWebRequest.Setup(client => client.GetResponse()) - .Returns(() => - { - return MockGCSClient.CreateResponseForUploadFile(httpStatusCode); - }); + .Returns(() => MockGCSClient.CreateResponseForUploadFile(httpStatusCode)); mockWebRequest.Setup(client => client.GetRequestStream()) .Returns(() => new MemoryStream()); _client.SetCustomWebRequest(mockWebRequest.Object); @@ -257,16 +255,14 @@ public void TestUploadFile(HttpStatusCode httpStatusCode, ResultStatus expectedR [TestCase(HttpStatusCode.Forbidden, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.InternalServerError, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.ServiceUnavailable, ResultStatus.NEED_RETRY)] - public async Task TestUploadFileAsync(HttpStatusCode httpStatusCode, ResultStatus expectedResultStatus) + [TestCase(null, ResultStatus.ERROR)] + public async Task TestUploadFileAsync(HttpStatusCode? 
httpStatusCode, ResultStatus expectedResultStatus) { // Arrange var mockWebRequest = new Mock(); mockWebRequest.Setup(c => c.Headers).Returns(new WebHeaderCollection()); mockWebRequest.Setup(client => client.GetResponseAsync()) - .Returns(() => - { - return Task.FromResult((WebResponse)MockGCSClient.CreateResponseForUploadFile(httpStatusCode)); - }); + .Returns(() => Task.FromResult((WebResponse)MockGCSClient.CreateResponseForUploadFile(httpStatusCode))); mockWebRequest.Setup(client => client.GetRequestStreamAsync()) .Returns(() => Task.FromResult((Stream) new MemoryStream())); _client.SetCustomWebRequest(mockWebRequest.Object); @@ -301,7 +297,8 @@ private void AssertForUploadFileTests(ResultStatus expectedResultStatus) [TestCase(HttpStatusCode.Forbidden, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.InternalServerError, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.ServiceUnavailable, ResultStatus.NEED_RETRY)] - public void TestDownloadFile(HttpStatusCode httpStatusCode, ResultStatus expectedResultStatus) + [TestCase(null, ResultStatus.ERROR)] + public void TestDownloadFile(HttpStatusCode? httpStatusCode, ResultStatus expectedResultStatus) { // Arrange var mockWebRequest = new Mock(); @@ -325,7 +322,8 @@ public void TestDownloadFile(HttpStatusCode httpStatusCode, ResultStatus expecte [TestCase(HttpStatusCode.Forbidden, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.InternalServerError, ResultStatus.NEED_RETRY)] [TestCase(HttpStatusCode.ServiceUnavailable, ResultStatus.NEED_RETRY)] - public async Task TestDownloadFileAsync(HttpStatusCode httpStatusCode, ResultStatus expectedResultStatus) + [TestCase(null, ResultStatus.ERROR)] + public async Task TestDownloadFileAsync(HttpStatusCode? httpStatusCode, ResultStatus expectedResultStatus) { // Arrange var mockWebRequest = new Mock(); diff --git a/Snowflake.Data.Tests/UnitTests/SFReusableChunkTest.cs b/Snowflake.Data.Tests/UnitTests/SFReusableChunkTest.cs index 6f021994b..25627dcaf 100755 --- a/Snowflake.Data.Tests/UnitTests/SFReusableChunkTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFReusableChunkTest.cs @@ -219,6 +219,33 @@ public void TestResetClearsChunkData() Assert.AreEqual(0, chunk.ChunkIndex); Assert.AreEqual(chunkInfo.url, chunk.Url); Assert.AreEqual(chunkInfo.rowCount, chunk.RowCount); + Assert.AreEqual(chunkInfo.uncompressedSize, chunk.UncompressedSize); + Assert.Greater(chunk.data.blockCount, 0); + Assert.Greater(chunk.data.metaBlockCount, 0); + } + + [Test] + public void TestClearRemovesAllChunkData() + { + const int RowCount = 3; + string data = "[ [\"1\"], [\"2\"], [\"3\"] ]"; + var chunk = PrepareChunkAsync(data, 1, RowCount).Result; + + ExecResponseChunk chunkInfo = new ExecResponseChunk() + { + url = "new_url", + uncompressedSize = 100, + rowCount = 200 + }; + + chunk.Clear(); + + Assert.AreEqual(0, chunk.ChunkIndex); + Assert.AreEqual(null, chunk.Url); + Assert.AreEqual(0, chunk.RowCount); + Assert.AreEqual(0, chunk.UncompressedSize); + Assert.AreEqual(0, chunk.data.blockCount); + Assert.AreEqual(0, chunk.data.metaBlockCount); } private async Task PrepareChunkAsync(string stringData, int colCount, int rowCount) diff --git a/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs b/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs index a10b4660c..cff0c6959 100644 --- a/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs +++ b/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs @@ -23,25 +23,42 @@ public void TestTimeConversions(string value, string sfTypeString, object expect // assert 
Assert.AreEqual(expected, result); + + if (csharpType == typeof(DateTime)) + { + Assert.AreEqual(((DateTime)expected).Kind,((DateTime)result).Kind); + } } internal static IEnumerable TimeConversionCases() { - yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05")}; yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05Z")}; yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.Parse("2024-07-11 09:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05"), DateTimeKind.Utc)}; yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00")}; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05").ToLocalTime()}; yield return new object[] {"14:20:05", SFDataType.TIME.ToString(), TimeSpan.Parse("14:20:05")}; yield return new object[] {"2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11")}; - yield return new object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.123456")}; yield return new object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456Z")}; yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.Parse("2024-07-11 09:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05.123456"), DateTimeKind.Utc)}; yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456 -7:00")}; - yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05.123456").ToLocalTime()}; yield return new object[] {"14:20:05.123456", SFDataType.TIME.ToString(), TimeSpan.Parse("14:20:05.123456")}; + yield return new object[] {"9999-12-31 23:59:59.999999", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("9999-12-31 23:59:59.999999")}; + yield return new object[] {"9999-12-31 23:59:59.999999", SFDataType.TIMESTAMP_NTZ.ToString(), 
DateTimeOffset.Parse("9999-12-31 23:59:59.999999Z")}; + yield return new object[] {"9999-12-31 23:59:59.999999 +1:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("9999-12-31 23:59:59.999999 +1:00")}; + yield return new object[] {"9999-12-31 23:59:59.999999 +1:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("9999-12-31 22:59:59.999999"), DateTimeKind.Utc)}; + yield return new object[] {"9999-12-31 23:59:59.999999 +1:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("9999-12-31 23:59:59.999999 +1:00")}; + yield return new object[] {"9999-12-31 23:59:59.999999 +13:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("9999-12-31 10:59:59.999999").ToLocalTime()}; + yield return new object[] {"0001-01-01 00:00:00.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("0001-01-01 00:00:00.123456")}; + yield return new object[] {"0001-01-01 00:00:00.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTimeOffset.Parse("0001-01-01 00:00:00.123456Z")}; + yield return new object[] {"0001-01-01 00:00:00.123456 -1:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("0001-01-01 00:00:00.123456 -1:00")}; + yield return new object[] {"0001-01-01 00:00:00.123456 -1:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("0001-01-01 01:00:00.123456"), DateTimeKind.Utc)}; + yield return new object[] {"0001-01-01 00:00:00.123456 -1:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("0001-01-01 00:00:00.123456 -1:00")}; + yield return new object[] {"0001-01-01 00:00:00.123456 -13:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("0001-01-01 13:00:00.123456").ToLocalTime()}; } } } diff --git a/Snowflake.Data/Client/SnowflakeDbCommand.cs b/Snowflake.Data/Client/SnowflakeDbCommand.cs index b52d53643..68d3dccb0 100755 --- a/Snowflake.Data/Client/SnowflakeDbCommand.cs +++ b/Snowflake.Data/Client/SnowflakeDbCommand.cs @@ -393,7 +393,7 @@ private static Dictionary convertToBindList(List typeAndVal = SFDataConverter - .csharpTypeValToSfTypeVal(parameter.DbType, val); + .CSharpTypeValToSfTypeVal(parameter.DbType, val); bindingType = typeAndVal.Item1; vals.Add(typeAndVal.Item2); @@ -401,7 +401,7 @@ private static Dictionary convertToBindList(List convertToBindList(List typeAndVal = SFDataConverter - .csharpTypeValToSfTypeVal(parameter.DbType, parameter.Value); + .CSharpTypeValToSfTypeVal(parameter.DbType, parameter.Value); bindingType = typeAndVal.Item1; bindingVal = typeAndVal.Item2; } else { bindingType = parameter.SFDataType.ToString(); - bindingVal = SFDataConverter.csharpValToSfVal(parameter.SFDataType, parameter.Value); + bindingVal = SFDataConverter.CSharpValToSfVal(parameter.SFDataType, parameter.Value); } } diff --git a/Snowflake.Data/Client/SnowflakeDbDataReader.cs b/Snowflake.Data/Client/SnowflakeDbDataReader.cs index b874cc56f..7d475024a 100755 --- a/Snowflake.Data/Client/SnowflakeDbDataReader.cs +++ b/Snowflake.Data/Client/SnowflakeDbDataReader.cs @@ -189,10 +189,7 @@ public override double GetDouble(int ordinal) return resultSet.GetDouble(ordinal); } - public override IEnumerator GetEnumerator() - { - throw new NotImplementedException(); - } + public override IEnumerator GetEnumerator() => new DbEnumerator(this, closeReader: false); public override Type GetFieldType(int ordinal) { @@ -256,7 +253,7 @@ public override int GetValues(object[] values) return count; } - internal T GetObject(int ordinal) + public T GetObject(int ordinal) where T : class, new() { try @@ -279,15 +276,17 @@ internal T 
GetObject(int ordinal) } } - internal T[] GetArray(int ordinal) + public T[] GetArray(int ordinal) { try { var rowType = resultSet.sfResultSetMetaData.rowTypes[ordinal]; var fields = rowType.fields; - if (fields == null || fields.Count == 0 || !JsonToStructuredTypeConverter.IsArrayType(rowType.type)) + var isArrayOrVector = JsonToStructuredTypeConverter.IsArrayType(rowType.type) || + JsonToStructuredTypeConverter.IsVectorType(rowType.type); + if (fields == null || fields.Count == 0 || !isArrayOrVector) { - throw new StructuredTypesReadingException($"Method GetArray<{typeof(T)}> can be used only for structured array"); + throw new StructuredTypesReadingException($"Method GetArray<{typeof(T)}> can be used only for structured array or vector types"); } var stringValue = GetString(ordinal); @@ -302,7 +301,7 @@ internal T[] GetArray(int ordinal) } } - internal Dictionary GetMap(int ordinal) + public Dictionary GetMap(int ordinal) { try { diff --git a/Snowflake.Data/Core/ArrowResultChunk.cs b/Snowflake.Data/Core/ArrowResultChunk.cs index 901c01692..85e5de62c 100755 --- a/Snowflake.Data/Core/ArrowResultChunk.cs +++ b/Snowflake.Data/Core/ArrowResultChunk.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; +using System.Text; using Apache.Arrow; namespace Snowflake.Data.Core @@ -13,16 +14,16 @@ internal class ArrowResultChunk : BaseResultChunk internal override ResultFormat ResultFormat => ResultFormat.ARROW; private static readonly DateTimeOffset s_epochDate = SFDataConverter.UnixEpoch; - - private static readonly long[] s_powersOf10 = { - 1, - 10, - 100, - 1000, - 10000, - 100000, - 1000000, - 10000000, + + private static readonly long[] s_powersOf10 = { + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, 100000000, 1000000000 }; @@ -40,7 +41,6 @@ internal class ArrowResultChunk : BaseResultChunk private byte[][] _byte; private double[][] _double; - private int _currentBatchIndex; private int _currentRecordIndex = -1; @@ -62,7 +62,7 @@ public ArrowResultChunk(RecordBatch recordBatch) RowCount = recordBatch.Length; ColumnCount = recordBatch.ColumnCount; ChunkIndex = -1; - + ResetTempTables(); } @@ -81,11 +81,11 @@ public void AddRecordBatch(RecordBatch recordBatch) { RecordBatch.Add(recordBatch); } - + internal override void Reset(ExecResponseChunk chunkInfo, int chunkIndex) { base.Reset(chunkInfo, chunkIndex); - + _currentBatchIndex = 0; _currentRecordIndex = -1; RecordBatch.Clear(); @@ -97,7 +97,7 @@ internal override bool Next() { if (_currentBatchIndex >= RecordBatch.Count) return false; - + _currentRecordIndex += 1; if (_currentRecordIndex < RecordBatch[_currentBatchIndex].Length) return true; @@ -149,7 +149,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (column.IsNull(_currentRecordIndex)) return DBNull.Value; - + switch (srcType) { case SFDataType.FIXED: @@ -170,7 +170,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (scale == 0) return _short[columnIndex][_currentRecordIndex]; return _short[columnIndex][_currentRecordIndex] / (decimal)s_powersOf10[scale]; - + case Int32Array array: if (_int[columnIndex] == null) _int[columnIndex] = array.Values.ToArray(); @@ -184,7 +184,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (scale == 0) return _long[columnIndex][_currentRecordIndex]; return _long[columnIndex][_currentRecordIndex] / (decimal)s_powersOf10[scale]; - + case Decimal128Array array: return array.GetValue(_currentRecordIndex); } @@ -210,22 +210,56 @@ 
public object ExtractCell(int columnIndex, SFDataType srcType, long scale) _int[columnIndex] = ((StringArray)column).ValueOffsets.ToArray(); } return StringArray.DefaultEncoding.GetString( - _byte[columnIndex], - _int[columnIndex][_currentRecordIndex], + _byte[columnIndex], + _int[columnIndex][_currentRecordIndex], _int[columnIndex][_currentRecordIndex + 1] - _int[columnIndex][_currentRecordIndex]); - + + case SFDataType.VECTOR: + var col = (FixedSizeListArray)column; + var values = col.Values; + var vectorLength = values.Length / col.Length; + StringBuilder sb = new StringBuilder("["); + switch (values) + { + case Int32Array array: + for (int i = 0; i < vectorLength; i++) + { + sb.Append(array.GetValue(i + (_currentRecordIndex * vectorLength))); + sb.Append(','); + } + break; + case FloatArray array: + for (int i = 0; i < vectorLength; i++) + { + float.TryParse(array.GetValue(i + (_currentRecordIndex * vectorLength)).ToString(), out float val); + if (val.ToString().Contains("E")) + { + sb.Append(val); + } + else + { + sb.Append(val.ToString("N6")); + } + sb.Append(','); + } + break; + } + sb.Length--; + sb.Append("]"); + return sb.ToString(); + case SFDataType.BINARY: return ((BinaryArray)column).GetBytes(_currentRecordIndex).ToArray(); - + case SFDataType.DATE: if (_int[columnIndex] == null) _int[columnIndex] = ((Date32Array)column).Values.ToArray(); - return SFDataConverter.UnixEpoch.AddTicks(_int[columnIndex][_currentRecordIndex] * TicksPerDay); - + return DateTime.SpecifyKind(SFDataConverter.UnixEpoch.AddTicks(_int[columnIndex][_currentRecordIndex] * TicksPerDay), DateTimeKind.Unspecified); + case SFDataType.TIME: { long value; - + if (column.GetType() == typeof(Int32Array)) { if (_int[columnIndex] == null) @@ -244,7 +278,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) value = _long[columnIndex][_currentRecordIndex]; } - + if (scale == 0) return DateTimeOffset.FromUnixTimeSeconds(value).DateTime; if (scale <= 3) @@ -258,7 +292,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) var structCol = (StructArray)column; if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)structCol.Fields[0]).Values.ToArray(); - + if (structCol.Fields.Count == 2) { if (_int[columnIndex] == null) @@ -275,7 +309,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) _fraction[columnIndex] = ((Int32Array)structCol.Fields[1]).Values.ToArray(); if (_int[columnIndex] == null) _int[columnIndex] = ((Int32Array)structCol.Fields[2]).Values.ToArray(); - + var epoch = _long[columnIndex][_currentRecordIndex]; var fraction = _fraction[columnIndex][_currentRecordIndex]; var timezone = _int[columnIndex][_currentRecordIndex]; @@ -297,7 +331,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) { if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)column).Values.ToArray(); - + var value = _long[columnIndex][_currentRecordIndex]; var epoch = ExtractEpoch(value, scale); var fraction = ExtractFraction(value, scale); @@ -319,7 +353,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) { if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)column).Values.ToArray(); - + var value = _long[columnIndex][_currentRecordIndex]; var epoch = ExtractEpoch(value, scale); var fraction = ExtractFraction(value, scale); @@ -328,7 +362,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) } throw new NotSupportedException($"Type {srcType} is not 
supported."); } - + private long ExtractEpoch(long value, long scale) { return value / s_powersOf10[scale]; diff --git a/Snowflake.Data/Core/ArrowResultSet.cs b/Snowflake.Data/Core/ArrowResultSet.cs index 56a636c4e..178531eaf 100755 --- a/Snowflake.Data/Core/ArrowResultSet.cs +++ b/Snowflake.Data/Core/ArrowResultSet.cs @@ -18,7 +18,7 @@ class ArrowResultSet : SFBaseResultSet internal override ResultFormat ResultFormat => ResultFormat.ARROW; private static readonly SFLogger s_logger = SFLoggerFactory.GetLogger(); - + private readonly int _totalChunkCount; private BaseResultChunk _currentChunk; private readonly IChunkDownloader _chunkDownloader; @@ -44,7 +44,7 @@ public ArrowResultSet(QueryExecResponseData responseData, SFStatement sfStatemen isClosed = false; queryId = responseData.queryId; - + ReadChunk(responseData); } catch(Exception ex) @@ -95,21 +95,21 @@ internal override async Task NextAsync() return false; } - + internal override bool Next() { ThrowIfClosed(); if (_currentChunk.Next()) return true; - + if (_totalChunkCount > 0) { s_logger.Debug($"Get next chunk from chunk downloader, chunk: {_currentChunk.ChunkIndex + 1}/{_totalChunkCount}" + $" rows: {_currentChunk.RowCount}, size compressed: {_currentChunk.CompressedSize}," + $" size uncompressed: {_currentChunk.UncompressedSize}"); _currentChunk = Task.Run(async() => await (_chunkDownloader.GetNextChunkAsync()).ConfigureAwait(false)).Result; - + return _currentChunk?.Next() ?? false; } @@ -154,21 +154,21 @@ internal override bool Rewind() return false; } - + private object GetObjectInternal(int ordinal) { ThrowIfClosed(); ThrowIfOutOfBounds(ordinal); - + var type = sfResultSetMetaData.GetTypesByIndex(ordinal).Item1; var scale = sfResultSetMetaData.GetScaleByIndex(ordinal); - + var value = ((ArrowResultChunk)_currentChunk).ExtractCell(ordinal, type, (int)scale); return value ?? 
DBNull.Value; - + } - + internal override object GetValue(int ordinal) { var value = GetObjectInternal(ordinal); @@ -176,7 +176,7 @@ internal override object GetValue(int ordinal) { return value; } - + object obj; checked { @@ -196,6 +196,10 @@ internal override object GetValue(int ordinal) break; case bool ret: obj = ret; break; + case DateTime ret: obj = ret; + break; + case DateTimeOffset ret: obj = ret; + break; default: { var dstType = sfResultSetMetaData.GetCSharpTypeByIndex(ordinal); @@ -217,7 +221,7 @@ internal override bool GetBoolean(int ordinal) { return (bool)GetObjectInternal(ordinal); } - + internal override byte GetByte(int ordinal) { var value = GetObjectInternal(ordinal); @@ -244,7 +248,7 @@ internal override char GetChar(int ordinal) { return ((string)GetObjectInternal(ordinal))[0]; } - + internal override long GetChars(int ordinal, long dataOffset, char[] buffer, int bufferOffset, int length) { return ReadSubset(ordinal, dataOffset, buffer, bufferOffset, length); @@ -303,7 +307,7 @@ internal override double GetDouble(int ordinal) case int ret: return ret; case short ret: return ret; case sbyte ret: return ret; - default: return (double)value; + default: return (double)value; } } @@ -374,7 +378,7 @@ internal override long GetInt64(int ordinal) } } } - + internal override string GetString(int ordinal) { var value = GetObjectInternal(ordinal); @@ -388,20 +392,20 @@ internal override string GetString(int ordinal) return ret; case DateTime ret: if (type == SFDataType.DATE) - return SFDataConverter.toDateString(ret, sfResultSetMetaData.dateOutputFormat); + return SFDataConverter.ToDateString(ret, sfResultSetMetaData.dateOutputFormat); break; } return Convert.ToString(value); } - + private void UpdateSessionStatus(QueryExecResponseData responseData) { SFSession session = this.sfStatement.SfSession; session.UpdateSessionProperties(responseData); session.UpdateSessionParameterMap(responseData.parameters); } - + private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferOffset, int length) where T : struct { if (dataOffset < 0) @@ -417,7 +421,7 @@ private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferO if (buffer != null && bufferOffset > buffer.Length) { throw new System.ArgumentException( - "Destination buffer is not long enough. Check the buffer offset, length, and the buffer's lower bounds.", + "Destination buffer is not long enough. Check the buffer offset, length, and the buffer's lower bounds.", nameof(buffer)); } @@ -446,14 +450,14 @@ private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferO "Source data is not long enough. 
Check the data offset, length, and the data's lower bounds.", nameof(dataOffset)); } - + long dataLength = data.Length - dataOffset; long elementsRead = Math.Min(length, dataLength); Array.Copy(data, dataOffset, buffer, bufferOffset, elementsRead); return elementsRead; - + } - + } } diff --git a/Snowflake.Data/Core/BaseResultChunk.cs b/Snowflake.Data/Core/BaseResultChunk.cs index 37e8fa114..b3b764210 100755 --- a/Snowflake.Data/Core/BaseResultChunk.cs +++ b/Snowflake.Data/Core/BaseResultChunk.cs @@ -9,21 +9,21 @@ namespace Snowflake.Data.Core public abstract class BaseResultChunk : IResultChunk { internal abstract ResultFormat ResultFormat { get; } - + public int RowCount { get; protected set; } - + public int ColumnCount { get; protected set; } - + public int ChunkIndex { get; protected set; } internal int CompressedSize; - + internal int UncompressedSize; internal string Url { get; set; } internal string[,] RowSet { get; set; } - + public int GetRowCount() => RowCount; public int GetChunkIndex() => ChunkIndex; @@ -32,11 +32,11 @@ public abstract class BaseResultChunk : IResultChunk public abstract UTF8Buffer ExtractCell(int rowIndex, int columnIndex); public abstract UTF8Buffer ExtractCell(int columnIndex); - + internal abstract bool Next(); - + internal abstract bool Rewind(); - + internal virtual void Reset(ExecResponseChunk chunkInfo, int chunkIndex) { RowCount = chunkInfo.rowCount; @@ -46,6 +46,15 @@ internal virtual void Reset(ExecResponseChunk chunkInfo, int chunkIndex) UncompressedSize = chunkInfo.uncompressedSize; } + internal virtual void Clear() + { + RowCount = 0; + Url = null; + ChunkIndex = 0; + CompressedSize = 0; + UncompressedSize = 0; + } + internal virtual void ResetForRetry() { } diff --git a/Snowflake.Data/Core/Converter/JsonToStructuredTypeConverter.cs b/Snowflake.Data/Core/Converter/JsonToStructuredTypeConverter.cs index 5d9a51a4b..ddbdf0624 100644 --- a/Snowflake.Data/Core/Converter/JsonToStructuredTypeConverter.cs +++ b/Snowflake.Data/Core/Converter/JsonToStructuredTypeConverter.cs @@ -156,14 +156,25 @@ private static object ConvertToUnstructuredType(FieldMetadata fieldMetadata, Typ { var value = json.Value(); var bytes = Encoding.UTF8.GetBytes(value); + if ((value.Contains("e") || value.Contains("E"))) + { + if (fieldType == typeof(float) || fieldType == typeof(float?)) + { + return float.Parse(value); + } + else + { + return double.Parse(value); + } + } var decimalValue = FastParser.FastParseDecimal(bytes, 0, bytes.Length); if (fieldType == typeof(float) || fieldType == typeof(float?)) { - return (float) decimalValue; + return (float)decimalValue; } if (fieldType == typeof(double) || fieldType == typeof(double?)) { - return (double) decimalValue; + return (double)decimalValue; } return decimalValue; } @@ -413,6 +424,9 @@ private static bool IsArrayMetadata(FieldMetadata fieldMetadata) => internal static bool IsArrayType(string type) => SFDataType.ARRAY.ToString().Equals(type, StringComparison.OrdinalIgnoreCase); + internal static bool IsVectorType(string type) => + SFDataType.VECTOR.ToString().Equals(type, StringComparison.OrdinalIgnoreCase); + private static bool IsVariantMetadata(FieldMetadata fieldMetadata) => SFDataType.VARIANT.ToString().Equals(fieldMetadata.type, StringComparison.OrdinalIgnoreCase); diff --git a/Snowflake.Data/Core/Converter/TimeConverter.cs b/Snowflake.Data/Core/Converter/TimeConverter.cs index 3f1252762..7a95de580 100644 --- a/Snowflake.Data/Core/Converter/TimeConverter.cs +++ b/Snowflake.Data/Core/Converter/TimeConverter.cs @@ -12,15 
+12,15 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (timestampType == SFDataType.TIMESTAMP_NTZ) { - var dateTimeUtc = DateTime.Parse(value).ToUniversalTime(); + var dateTimeNoTz = DateTime.Parse(value); if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeUtc; + return dateTimeNoTz; } if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return (DateTimeOffset) dateTimeUtc; + return (DateTimeOffset) DateTime.SpecifyKind(dateTimeNoTz, DateTimeKind.Utc); } throw new StructuredTypesReadingException($"Cannot read TIMESTAMP_NTZ into {fieldType} type"); @@ -35,21 +35,21 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeOffset.ToUniversalTime().DateTime.ToUniversalTime(); + return dateTimeOffset.UtcDateTime; } throw new StructuredTypesReadingException($"Cannot read TIMESTAMP_TZ into {fieldType} type"); } if (timestampType == SFDataType.TIMESTAMP_LTZ) { - var dateTimeOffset = DateTimeOffset.Parse(value); + var dateTimeOffsetLocal = DateTimeOffset.Parse(value).ToLocalTime(); if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return dateTimeOffset; + return dateTimeOffsetLocal; } if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeOffset.UtcDateTime; + return dateTimeOffsetLocal.LocalDateTime; } throw new StructuredTypesReadingException($"Cannot read TIMESTAMP_LTZ into {fieldType} type"); } @@ -63,13 +63,14 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (timestampType == SFDataType.DATE) { - if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) + var dateTime = DateTime.Parse(value); + if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return DateTimeOffset.Parse(value).ToUniversalTime(); + return dateTime; } - if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) + if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return DateTime.Parse(value).ToUniversalTime(); + return (DateTimeOffset) DateTime.SpecifyKind(dateTime, DateTimeKind.Utc); } throw new StructuredTypesReadingException($"Cannot read DATE into {fieldType} type"); } diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs index 14abaad4a..9e588e921 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs @@ -349,7 +349,7 @@ public void DownloadFile(SFFileMetadata fileMetadata, string fullDstPath, int ma try { // Issue the GET request - WebRequest request = _customWebRequest == null ? FormBaseRequest(fileMetadata, "GET") : _customWebRequest; + WebRequest request = _customWebRequest == null ?
FormBaseRequest(fileMetadata, "GET") : _customWebRequest; using (HttpWebResponse response = (HttpWebResponse)request.GetResponse()) { // Write to file @@ -444,7 +444,7 @@ private void HandleDownloadResponse(HttpWebResponse response, SFFileMetadata fil private SFFileMetadata HandleFileHeaderErrForPresignedUrls(WebException ex, SFFileMetadata fileMetadata) { Logger.Error("Failed to get file header for presigned url: " + ex.Message); - + HttpWebResponse response = (HttpWebResponse)ex.Response; if (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || @@ -509,7 +509,11 @@ private SFFileMetadata HandleUploadFileErr(WebException ex, SFFileMetadata fileM fileMetadata.lastError = ex; HttpWebResponse response = (HttpWebResponse)ex.Response; - if (response.StatusCode == HttpStatusCode.BadRequest && GCS_ACCESS_TOKEN != null) + if (response is null) + { + fileMetadata.resultStatus = ResultStatus.ERROR.ToString(); + } + else if (response.StatusCode == HttpStatusCode.BadRequest && GCS_ACCESS_TOKEN != null) { fileMetadata.resultStatus = ResultStatus.RENEW_PRESIGNED_URL.ToString(); } @@ -539,7 +543,11 @@ private SFFileMetadata HandleDownloadFileErr(WebException ex, SFFileMetadata fil fileMetadata.lastError = ex; HttpWebResponse response = (HttpWebResponse)ex.Response; - if (response.StatusCode == HttpStatusCode.Unauthorized) + if (response is null) + { + fileMetadata.resultStatus = ResultStatus.ERROR.ToString(); + } + else if (response.StatusCode == HttpStatusCode.Unauthorized) { fileMetadata.resultStatus = ResultStatus.RENEW_TOKEN.ToString(); } diff --git a/Snowflake.Data/Core/SFBindUploader.cs b/Snowflake.Data/Core/SFBindUploader.cs index 6268c724c..400c3b0c9 100644 --- a/Snowflake.Data/Core/SFBindUploader.cs +++ b/Snowflake.Data/Core/SFBindUploader.cs @@ -251,26 +251,38 @@ internal string GetCSVData(string sType, string sValue) return '"' + sValue.Replace("\"", "\"\"") + '"'; return sValue; case "DATE": - long msFromEpoch = long.Parse(sValue); // SFDateConverter.csharpValToSfVal provides in [ms] from Epoch + long msFromEpoch = long.Parse(sValue); // SFDateConverter.CSharpValToSfVal provides in [ms] from Epoch DateTime date = epoch.AddMilliseconds(msFromEpoch); return date.ToShortDateString(); case "TIME": - long nsSinceMidnight = long.Parse(sValue); // SFDateConverter.csharpValToSfVal provides in [ns] from Midnight + long nsSinceMidnight = long.Parse(sValue); // SFDateConverter.CSharpValToSfVal provides in [ns] from Midnight DateTime time = epoch.AddTicks(nsSinceMidnight/100); return time.ToString("HH:mm:ss.fffffff"); case "TIMESTAMP_LTZ": - long nsFromEpochLtz = long.Parse(sValue); // SFDateConverter.csharpValToSfVal provides in [ns] from Epoch - DateTime ltz = epoch.AddTicks(nsFromEpochLtz/100); + long ticksFromEpochLtz = + long.TryParse(sValue, out var nsLtz) + ? nsLtz / 100 + : (long)(decimal.Parse(sValue) / 100); + + DateTime ltz = epoch.AddTicks(ticksFromEpochLtz); return ltz.ToLocalTime().ToString("O"); // ISO 8601 format case "TIMESTAMP_NTZ": - long nsFromEpochNtz = long.Parse(sValue); // SFDateConverter.csharpValToSfVal provides in [ns] from Epoch - DateTime ntz = epoch.AddTicks(nsFromEpochNtz/100); + long ticksFromEpochNtz = + long.TryParse(sValue, out var nsNtz) + ? 
nsNtz / 100 + : (long)(decimal.Parse(sValue) / 100); + + DateTime ntz = epoch.AddTicks(ticksFromEpochNtz); return ntz.ToString("yyyy-MM-dd HH:mm:ss.fffffff"); case "TIMESTAMP_TZ": string[] tstzString = sValue.Split(' '); - long nsFromEpochTz = long.Parse(tstzString[0]); // SFDateConverter provides in [ns] from Epoch + long ticksFromEpochTz = + long.TryParse(tstzString[0], out var nsTz) + ? nsTz / 100 + : (long)(decimal.Parse(tstzString[0]) / 100); + int timeZoneOffset = int.Parse(tstzString[1]) - 1440; // SFDateConverter provides in minutes increased by 1440m - DateTime timestamp = epoch.AddTicks(nsFromEpochTz/100).AddMinutes(timeZoneOffset); + DateTime timestamp = epoch.AddTicks(ticksFromEpochTz).AddMinutes(timeZoneOffset); TimeSpan offset = TimeSpan.FromMinutes(timeZoneOffset); DateTimeOffset tzDateTimeOffset = new DateTimeOffset(timestamp.Ticks, offset); return tzDateTimeOffset.ToString("yyyy-MM-dd HH:mm:ss.fffffff zzz"); diff --git a/Snowflake.Data/Core/SFBlockingChunkDownloaderV3.cs b/Snowflake.Data/Core/SFBlockingChunkDownloaderV3.cs index 282c502b1..2e19146aa 100755 --- a/Snowflake.Data/Core/SFBlockingChunkDownloaderV3.cs +++ b/Snowflake.Data/Core/SFBlockingChunkDownloaderV3.cs @@ -1,239 +1,239 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. - */ - -using System; -using System.IO.Compression; -using System.IO; -using System.Collections; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using System.Net.Http; -using Newtonsoft.Json; -using System.Diagnostics; -using Newtonsoft.Json.Serialization; -using Snowflake.Data.Log; - -namespace Snowflake.Data.Core -{ - class SFBlockingChunkDownloaderV3 : IChunkDownloader - { - static private SFLogger logger = SFLoggerFactory.GetLogger(); - - private List chunkDatas = new List(); - - private string qrmk; - - private int nextChunkToDownloadIndex; - - private int nextChunkToConsumeIndex; - - // External cancellation token, used to stop donwload - private CancellationToken externalCancellationToken; - - private readonly int prefetchSlot; - - private readonly IRestRequester _RestRequester; - - private readonly SFSessionProperties sessionProperies; - - private Dictionary chunkHeaders; - - private readonly SFBaseResultSet ResultSet; - - private readonly List chunkInfos; - - private readonly List> taskQueues; - - public SFBlockingChunkDownloaderV3(int colCount, - List chunkInfos, string qrmk, - Dictionary chunkHeaders, - CancellationToken cancellationToken, - SFBaseResultSet ResultSet, - ResultFormat resultFormat) - { - this.qrmk = qrmk; - this.chunkHeaders = chunkHeaders; - this.nextChunkToDownloadIndex = 0; - this.ResultSet = ResultSet; - this._RestRequester = ResultSet.sfStatement.SfSession.restRequester; - this.sessionProperies = ResultSet.sfStatement.SfSession.properties; - this.prefetchSlot = Math.Min(chunkInfos.Count, GetPrefetchThreads(ResultSet)); - this.chunkInfos = chunkInfos; - this.nextChunkToConsumeIndex = 0; - this.taskQueues = new List>(); - externalCancellationToken = cancellationToken; - - for (int i=0; i sessionParameters = resultSet.sfStatement.SfSession.ParameterMap; - String val = (String)sessionParameters[SFSessionParameter.CLIENT_PREFETCH_THREADS]; - return Int32.Parse(val); - } - - public async Task GetNextChunkAsync() - { - logger.Info($"NextChunkToConsume: {nextChunkToConsumeIndex}, NextChunkToDownload: {nextChunkToDownloadIndex}"); - if (nextChunkToConsumeIndex < 
chunkInfos.Count) - { - Task chunk = taskQueues[nextChunkToConsumeIndex % prefetchSlot]; - - if (nextChunkToDownloadIndex < chunkInfos.Count && nextChunkToConsumeIndex > 0) - { - BaseResultChunk reusableChunk = chunkDatas[nextChunkToDownloadIndex % prefetchSlot]; - reusableChunk.Reset(chunkInfos[nextChunkToDownloadIndex], nextChunkToDownloadIndex); - - taskQueues[nextChunkToDownloadIndex % prefetchSlot] = DownloadChunkAsync(new DownloadContextV3() - { - chunk = reusableChunk, - qrmk = this.qrmk, - chunkHeaders = this.chunkHeaders, - cancellationToken = externalCancellationToken - }); - nextChunkToDownloadIndex++; - - // in case of one slot we need to return the chunk already downloaded - if (prefetchSlot == 1) - { - chunk = taskQueues[0]; - } - } - nextChunkToConsumeIndex++; - return await chunk; - } - else - { - return await Task.FromResult(null); - } - } - - private async Task DownloadChunkAsync(DownloadContextV3 downloadContext) - { - BaseResultChunk chunk = downloadContext.chunk; - int backOffInSec = 1; - bool retry = false; - int retryCount = 0; - int maxRetry = int.Parse(sessionProperies[SFSessionProperty.MAXHTTPRETRIES]); - - do - { - retry = false; - - S3DownloadRequest downloadRequest = - new S3DownloadRequest() - { - Url = new UriBuilder(chunk.Url).Uri, - qrmk = downloadContext.qrmk, - // s3 download request timeout to one hour - RestTimeout = TimeSpan.FromHours(1), - HttpTimeout = Timeout.InfiniteTimeSpan, // Disable timeout for each request - chunkHeaders = downloadContext.chunkHeaders, - sid = ResultSet.sfStatement.SfSession.sessionId - }; - - using (var httpResponse = await _RestRequester.GetAsync(downloadRequest, downloadContext.cancellationToken) - .ConfigureAwait(continueOnCapturedContext: false)) - using (Stream stream = await httpResponse.Content.ReadAsStreamAsync() - .ConfigureAwait(continueOnCapturedContext: false)) - { - // retry on chunk downloading since the retry logic in HttpClient.RetryHandler - // doesn't cover this. The GET request could be succeeded but network error - // still could happen during reading chunk data from stream and that needs - // retry as well. 
- try - { - IEnumerable encoding; - if (httpResponse.Content.Headers.TryGetValues("Content-Encoding", out encoding)) - { - if (String.Compare(encoding.First(), "gzip", true) == 0) - { - using (Stream streamGzip = new GZipStream(stream, CompressionMode.Decompress)) - { - await ParseStreamIntoChunk(streamGzip, chunk).ConfigureAwait(false); - } - } - else - { - await ParseStreamIntoChunk(stream, chunk).ConfigureAwait(false); - } - } - else - { - await ParseStreamIntoChunk(stream, chunk).ConfigureAwait(false); - } - } - catch (Exception e) - { - if ((maxRetry <= 0) || (retryCount < maxRetry)) - { - logger.Debug($"Retry {retryCount}/{maxRetry} of parse stream to chunk error: " + e.Message); - retry = true; - // reset the chunk before retry in case there could be garbage - // data left from last attempt - chunk.ResetForRetry(); - await Task.Delay(TimeSpan.FromSeconds(backOffInSec), downloadContext.cancellationToken).ConfigureAwait(false); - ++retryCount; - // Set next backoff time - backOffInSec = backOffInSec * 2; - if (backOffInSec > HttpUtil.MAX_BACKOFF) - { - backOffInSec = HttpUtil.MAX_BACKOFF; - } - } - else - { - //parse error - logger.Error("Failed retries of parse stream to chunk error: " + e.Message); - throw new Exception("Parse stream to chunk error: " + e.Message); - } - } - } - } while (retry); - logger.Info($"Succeed downloading chunk #{chunk.ChunkIndex}"); - return chunk; - } - - private async Task ParseStreamIntoChunk(Stream content, BaseResultChunk resultChunk) - { - IChunkParser parser = ChunkParserFactory.Instance.GetParser(resultChunk.ResultFormat, content); - await parser.ParseChunk(resultChunk); - } - } - - class DownloadContextV3 - { - public BaseResultChunk chunk { get; set; } - - public string qrmk { get; set; } - - public Dictionary chunkHeaders { get; set; } - - public CancellationToken cancellationToken { get; set; } - } -} +/* + * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. 
+ */ + +using System; +using System.IO.Compression; +using System.IO; +using System.Collections; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Net.Http; +using Newtonsoft.Json; +using System.Diagnostics; +using Newtonsoft.Json.Serialization; +using Snowflake.Data.Log; + +namespace Snowflake.Data.Core +{ + class SFBlockingChunkDownloaderV3 : IChunkDownloader + { + static private SFLogger logger = SFLoggerFactory.GetLogger(); + + private List chunkDatas = new List(); + + private string qrmk; + + private int nextChunkToDownloadIndex; + + private int nextChunkToConsumeIndex; + + // External cancellation token, used to stop donwload + private CancellationToken externalCancellationToken; + + private readonly int prefetchSlot; + + private readonly IRestRequester _RestRequester; + + private readonly SFSessionProperties sessionProperies; + + private Dictionary chunkHeaders; + + private readonly SFBaseResultSet ResultSet; + + private readonly List chunkInfos; + + private readonly List> taskQueues; + + public SFBlockingChunkDownloaderV3(int colCount, + List chunkInfos, string qrmk, + Dictionary chunkHeaders, + CancellationToken cancellationToken, + SFBaseResultSet ResultSet, + ResultFormat resultFormat) + { + this.qrmk = qrmk; + this.chunkHeaders = chunkHeaders; + this.nextChunkToDownloadIndex = 0; + this.ResultSet = ResultSet; + this._RestRequester = ResultSet.sfStatement.SfSession.restRequester; + this.sessionProperies = ResultSet.sfStatement.SfSession.properties; + this.prefetchSlot = Math.Min(chunkInfos.Count, GetPrefetchThreads(ResultSet)); + this.chunkInfos = chunkInfos; + this.nextChunkToConsumeIndex = 0; + this.taskQueues = new List>(); + externalCancellationToken = cancellationToken; + + for (int i=0; i sessionParameters = resultSet.sfStatement.SfSession.ParameterMap; + String val = (String)sessionParameters[SFSessionParameter.CLIENT_PREFETCH_THREADS]; + return Int32.Parse(val); + } + + public async Task GetNextChunkAsync() + { + logger.Info($"NextChunkToConsume: {nextChunkToConsumeIndex}, NextChunkToDownload: {nextChunkToDownloadIndex}"); + if (nextChunkToConsumeIndex < chunkInfos.Count) + { + Task chunk = taskQueues[nextChunkToConsumeIndex % prefetchSlot]; + + if (nextChunkToDownloadIndex < chunkInfos.Count && nextChunkToConsumeIndex > 0) + { + BaseResultChunk reusableChunk = chunkDatas[nextChunkToDownloadIndex % prefetchSlot]; + reusableChunk.Reset(chunkInfos[nextChunkToDownloadIndex], nextChunkToDownloadIndex); + + taskQueues[nextChunkToDownloadIndex % prefetchSlot] = DownloadChunkAsync(new DownloadContextV3() + { + chunk = reusableChunk, + qrmk = this.qrmk, + chunkHeaders = this.chunkHeaders, + cancellationToken = externalCancellationToken + }); + nextChunkToDownloadIndex++; + + // in case of one slot we need to return the chunk already downloaded + if (prefetchSlot == 1) + { + chunk = taskQueues[0]; + } + } + nextChunkToConsumeIndex++; + return await chunk; + } + else + { + return await Task.FromResult(null); + } + } + + private async Task DownloadChunkAsync(DownloadContextV3 downloadContext) + { + BaseResultChunk chunk = downloadContext.chunk; + int backOffInSec = 1; + bool retry = false; + int retryCount = 0; + int maxRetry = int.Parse(sessionProperies[SFSessionProperty.MAXHTTPRETRIES]); + + do + { + retry = false; + + S3DownloadRequest downloadRequest = + new S3DownloadRequest() + { + Url = new UriBuilder(chunk.Url).Uri, + qrmk = 
downloadContext.qrmk, + // s3 download request timeout to one hour + RestTimeout = TimeSpan.FromHours(1), + HttpTimeout = Timeout.InfiniteTimeSpan, // Disable timeout for each request + chunkHeaders = downloadContext.chunkHeaders, + sid = ResultSet.sfStatement.SfSession.sessionId + }; + + using (var httpResponse = await _RestRequester.GetAsync(downloadRequest, downloadContext.cancellationToken) + .ConfigureAwait(continueOnCapturedContext: false)) + using (Stream stream = await httpResponse.Content.ReadAsStreamAsync() + .ConfigureAwait(continueOnCapturedContext: false)) + { + // retry on chunk downloading since the retry logic in HttpClient.RetryHandler + // doesn't cover this. The GET request could be succeeded but network error + // still could happen during reading chunk data from stream and that needs + // retry as well. + try + { + IEnumerable encoding; + if (httpResponse.Content.Headers.TryGetValues("Content-Encoding", out encoding)) + { + if (String.Compare(encoding.First(), "gzip", true) == 0) + { + using (Stream streamGzip = new GZipStream(stream, CompressionMode.Decompress)) + { + await ParseStreamIntoChunk(streamGzip, chunk).ConfigureAwait(false); + } + } + else + { + await ParseStreamIntoChunk(stream, chunk).ConfigureAwait(false); + } + } + else + { + await ParseStreamIntoChunk(stream, chunk).ConfigureAwait(false); + } + } + catch (Exception e) + { + if ((maxRetry <= 0) || (retryCount < maxRetry)) + { + logger.Debug($"Retry {retryCount}/{maxRetry} of parse stream to chunk error: " + e.Message); + retry = true; + // reset the chunk before retry in case there could be garbage + // data left from last attempt + chunk.ResetForRetry(); + await Task.Delay(TimeSpan.FromSeconds(backOffInSec), downloadContext.cancellationToken).ConfigureAwait(false); + ++retryCount; + // Set next backoff time + backOffInSec = backOffInSec * 2; + if (backOffInSec > HttpUtil.MAX_BACKOFF) + { + backOffInSec = HttpUtil.MAX_BACKOFF; + } + } + else + { + //parse error + logger.Error("Failed retries of parse stream to chunk error: " + e.Message); + throw new Exception("Parse stream to chunk error: " + e.Message); + } + } + } + } while (retry); + logger.Info($"Succeed downloading chunk #{chunk.ChunkIndex}"); + return chunk; + } + + private async Task ParseStreamIntoChunk(Stream content, BaseResultChunk resultChunk) + { + IChunkParser parser = ChunkParserFactory.Instance.GetParser(resultChunk.ResultFormat, content); + await parser.ParseChunk(resultChunk); + } + } + + class DownloadContextV3 + { + public BaseResultChunk chunk { get; set; } + + public string qrmk { get; set; } + + public Dictionary chunkHeaders { get; set; } + + public CancellationToken cancellationToken { get; set; } + } +} diff --git a/Snowflake.Data/Core/SFDataConverter.cs b/Snowflake.Data/Core/SFDataConverter.cs index 2300a26bb..619976400 100755 --- a/Snowflake.Data/Core/SFDataConverter.cs +++ b/Snowflake.Data/Core/SFDataConverter.cs @@ -1,4 +1,4 @@ -/* +/* * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. 
*/ @@ -14,7 +14,7 @@ namespace Snowflake.Data.Core public enum SFDataType { None, FIXED, REAL, TEXT, DATE, VARIANT, TIMESTAMP_LTZ, TIMESTAMP_NTZ, - TIMESTAMP_TZ, OBJECT, BINARY, TIME, BOOLEAN, ARRAY, MAP + TIMESTAMP_TZ, OBJECT, BINARY, TIME, BOOLEAN, ARRAY, MAP, VECTOR } static class SFDataConverter @@ -152,12 +152,12 @@ private static DateTime ConvertToDateTime(UTF8Buffer srcVal, SFDataType srcType) { case SFDataType.DATE: long srcValLong = FastParser.FastParseInt64(srcVal.Buffer, srcVal.offset, srcVal.length); - return UnixEpoch.AddDays(srcValLong); + return DateTime.SpecifyKind(UnixEpoch.AddDays(srcValLong), DateTimeKind.Unspecified); case SFDataType.TIME: case SFDataType.TIMESTAMP_NTZ: var tickDiff = GetTicksFromSecondAndNanosecond(srcVal); - return UnixEpoch.AddTicks(tickDiff); + return DateTime.SpecifyKind(UnixEpoch.AddTicks(tickDiff), DateTimeKind.Unspecified); default: throw new SnowflakeDbException(SFError.INVALID_DATA_CONVERSION, srcVal, srcType, typeof(DateTime)); @@ -240,7 +240,7 @@ private static long GetTicksFromSecondAndNanosecond(UTF8Buffer srcVal) } - internal static Tuple csharpTypeValToSfTypeVal(DbType srcType, object srcVal) + internal static Tuple CSharpTypeValToSfTypeVal(DbType srcType, object srcVal) { SFDataType destType; string destVal; @@ -300,7 +300,7 @@ internal static Tuple csharpTypeValToSfTypeVal(DbType srcType, o default: throw new SnowflakeDbException(SFError.UNSUPPORTED_DOTNET_TYPE, srcType); } - destVal = csharpValToSfVal(destType, srcVal); + destVal = CSharpValToSfVal(destType, srcVal); return Tuple.Create(destType.ToString(), destVal); } @@ -323,7 +323,7 @@ internal static byte[] HexToBytes(string hex) return bytes; } - internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) + internal static string CSharpValToSfVal(SFDataType sfDataType, object srcVal) { string destVal = null; @@ -331,18 +331,6 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) { switch (sfDataType) { - case SFDataType.TIMESTAMP_LTZ: - if (srcVal.GetType() != typeof(DateTimeOffset)) - { - throw new SnowflakeDbException(SFError.INVALID_DATA_CONVERSION, srcVal, - srcVal.GetType().ToString(), SFDataType.TIMESTAMP_LTZ.ToString()); - } - else - { - destVal = ((long)(((DateTimeOffset)srcVal).UtcTicks - UnixEpoch.Ticks) * 100).ToString(); - } - break; - case SFDataType.FIXED: case SFDataType.BOOLEAN: case SFDataType.REAL: @@ -359,9 +347,8 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) else { DateTime srcDt = ((DateTime)srcVal); - long nanoSinceMidNight = (long)(srcDt.Ticks - srcDt.Date.Ticks) * 100L; - - destVal = nanoSinceMidNight.ToString(); + var tickDiff = srcDt.Ticks - srcDt.Date.Ticks; + destVal = TicksToNanoSecondsString(tickDiff); } break; @@ -380,6 +367,19 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) } break; + case SFDataType.TIMESTAMP_LTZ: + if (srcVal.GetType() != typeof(DateTimeOffset)) + { + throw new SnowflakeDbException(SFError.INVALID_DATA_CONVERSION, srcVal, + srcVal.GetType().ToString(), SFDataType.TIMESTAMP_LTZ.ToString()); + } + else + { + var tickDiff = ((DateTimeOffset)srcVal).UtcTicks - UnixEpoch.Ticks; + destVal = TicksToNanoSecondsString(tickDiff); + } + break; + case SFDataType.TIMESTAMP_NTZ: if (srcVal.GetType() != typeof(DateTime)) { @@ -391,7 +391,7 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) DateTime srcDt = (DateTime)srcVal; var diff = srcDt.Subtract(UnixEpoch); var tickDiff = diff.Ticks; - destVal = 
$"{tickDiff}00"; // Cannot multiple tickDiff by 100 because long might overflow. + destVal = TicksToNanoSecondsString(tickDiff); } break; @@ -404,8 +404,8 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) else { DateTimeOffset dtOffset = (DateTimeOffset)srcVal; - destVal = String.Format("{0} {1}", (dtOffset.UtcTicks - UnixEpoch.Ticks) * 100L, - dtOffset.Offset.TotalMinutes + 1440); + var tickDiff = dtOffset.UtcTicks - UnixEpoch.Ticks; + destVal = $"{TicksToNanoSecondsString(tickDiff)} {dtOffset.Offset.TotalMinutes + 1440}"; } break; @@ -429,7 +429,9 @@ internal static string csharpValToSfVal(SFDataType sfDataType, object srcVal) return destVal; } - internal static string toDateString(DateTime date, string formatter) + private static string TicksToNanoSecondsString(long tickDiff) => tickDiff == 0 ? "0" : $"{tickDiff}00"; + + internal static string ToDateString(DateTime date, string formatter) { // change formatter from "YYYY-MM-DD" to "yyyy-MM-dd" formatter = formatter.Replace("Y", "y").Replace("m", "M").Replace("D", "d"); diff --git a/Snowflake.Data/Core/SFResultSet.cs b/Snowflake.Data/Core/SFResultSet.cs index a7586f2c3..e81db8c14 100755 --- a/Snowflake.Data/Core/SFResultSet.cs +++ b/Snowflake.Data/Core/SFResultSet.cs @@ -283,7 +283,7 @@ internal override string GetString(int ordinal) var val = GetValue(ordinal); if (val == DBNull.Value) return null; - return SFDataConverter.toDateString((DateTime)val, sfResultSetMetaData.dateOutputFormat); + return SFDataConverter.ToDateString((DateTime)val, sfResultSetMetaData.dateOutputFormat); default: return GetObjectInternal(ordinal).SafeToString(); diff --git a/Snowflake.Data/Core/SFResultSetMetaData.cs b/Snowflake.Data/Core/SFResultSetMetaData.cs index bae97d221..bb5f2e819 100755 --- a/Snowflake.Data/Core/SFResultSetMetaData.cs +++ b/Snowflake.Data/Core/SFResultSetMetaData.cs @@ -1,4 +1,4 @@ -/* +/* * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. 
*/ @@ -130,7 +130,7 @@ private SFDataType GetSFDataType(string type) return rslt; throw new SnowflakeDbException(SFError.INTERNAL_ERROR, - $"Unknow column type: {type}"); + $"Unknown column type: {type}"); } private Type GetNativeTypeForColumn(SFDataType sfType, ExecResponseRowType col) @@ -145,6 +145,7 @@ private Type GetNativeTypeForColumn(SFDataType sfType, ExecResponseRowType col) case SFDataType.VARIANT: case SFDataType.OBJECT: case SFDataType.ARRAY: + case SFDataType.VECTOR: case SFDataType.MAP: return typeof(string); case SFDataType.DATE: @@ -160,7 +161,7 @@ private Type GetNativeTypeForColumn(SFDataType sfType, ExecResponseRowType col) return typeof(bool); default: throw new SnowflakeDbException(SFError.INTERNAL_ERROR, - $"Unknow column type: {sfType}"); + $"Unknown column type: {sfType}"); } } diff --git a/Snowflake.Data/Core/SFReusableChunk.cs b/Snowflake.Data/Core/SFReusableChunk.cs index 06ea7cef3..4db8ec0d7 100755 --- a/Snowflake.Data/Core/SFReusableChunk.cs +++ b/Snowflake.Data/Core/SFReusableChunk.cs @@ -11,8 +11,8 @@ namespace Snowflake.Data.Core class SFReusableChunk : BaseResultChunk { internal override ResultFormat ResultFormat => ResultFormat.JSON; - - private readonly BlockResultData data; + + internal readonly BlockResultData data; private int _currentRowIndex = -1; @@ -29,11 +29,18 @@ internal override void Reset(ExecResponseChunk chunkInfo, int chunkIndex) data.Reset(RowCount, ColumnCount, chunkInfo.uncompressedSize); } + internal override void Clear() + { + base.Clear(); + _currentRowIndex = -1; + data.Clear(); + } + internal override void ResetForRetry() { data.ResetForRetry(); } - + [Obsolete("ExtractCell with rowIndex is deprecated", false)] public override UTF8Buffer ExtractCell(int rowIndex, int columnIndex) { @@ -62,21 +69,22 @@ internal override bool Next() _currentRowIndex += 1; return _currentRowIndex < RowCount; } - + internal override bool Rewind() { _currentRowIndex -= 1; return _currentRowIndex >= 0; } - private class BlockResultData + internal class BlockResultData { private static readonly int NULL_VALUE = -100; - private int blockCount; - private static int blockLengthBits = 24; + internal int blockCount; + private static int blockLengthBits = 23; private static int blockLength = 1 << blockLengthBits; - int metaBlockCount; + + internal int metaBlockCount; private static int metaBlockLengthBits = 15; private static int metaBlockLength = 1 << metaBlockLengthBits; @@ -98,11 +106,24 @@ internal void Reset(int rowCount, int colCount, int uncompressedSize) savedColCount = colCount; currentDatOffset = 0; nextIndex = 0; - int bytesNeeded = uncompressedSize - (rowCount * 2) - (rowCount * colCount); - this.blockCount = getBlock(bytesNeeded - 1) + 1; + this.blockCount = 1; // init with 1 block only this.metaBlockCount = getMetaBlock(rowCount * colCount - 1) + 1; } + internal void Clear() + { + savedRowCount = 0; + savedColCount = 0; + currentDatOffset = 0; + nextIndex = 0; + blockCount = 0; + metaBlockCount = 0; + + data.Clear(); + offsets.Clear(); + lengths.Clear(); + } + internal void ResetForRetry() { currentDatOffset = 0; @@ -157,6 +178,16 @@ int copySize public void add(byte[] bytes, int length) { + // check if a new block for data is needed + if (getBlock(currentDatOffset) == blockCount - 1) + { + var neededSize = length - spaceLeftOnBlock(currentDatOffset); + while (neededSize >= 0) + { + blockCount++; + neededSize -= blockLength; + } + } if (data.Count < blockCount || offsets.Count < metaBlockCount) { allocateArrays(); @@ -232,12 +263,12 @@ private 
void allocateArrays() { while (data.Count < blockCount) { - data.Add(new byte[1 << blockLengthBits]); + data.Add(new byte[blockLength]); } while (offsets.Count < metaBlockCount) { - offsets.Add(new int[1 << metaBlockLengthBits]); - lengths.Add(new int[1 << metaBlockLengthBits]); + offsets.Add(new int[metaBlockLength]); + lengths.Add(new int[metaBlockLength]); } } }
diff --git a/Snowflake.Data/Core/SFStatement.cs b/Snowflake.Data/Core/SFStatement.cs
index e84690d54..146a10130 100644
--- a/Snowflake.Data/Core/SFStatement.cs
+++ b/Snowflake.Data/Core/SFStatement.cs
@@ -383,11 +383,14 @@ internal async Task<SFBaseResultSet> ExecuteAsync(int timeout, string sql, Dicti SFBindUploader uploader = new SFBindUploader(SfSession, _requestId); await uploader.UploadAsync(bindings, cancellationToken).ConfigureAwait(false); _bindStage = uploader.getStagePath(); - ClearQueryRequestId(); } catch (Exception e) { - logger.Warn("Exception encountered trying to upload binds to stage. Attaching binds in payload instead. {0}", e); + logger.Warn("Exception encountered trying to upload binds to stage. Attaching binds in payload instead. Exception: " + e.Message); + } + finally + { + ClearQueryRequestId(); } }
@@ -532,13 +535,14 @@ private SFBaseResultSet ExecuteSqlOtherThanPutGet(int timeout, string sql, Dicti SFBindUploader uploader = new SFBindUploader(SfSession, _requestId); uploader.Upload(bindings); _bindStage = uploader.getStagePath(); - ClearQueryRequestId(); } catch (Exception e) { - logger.Warn( - "Exception encountered trying to upload binds to stage. Attaching binds in payload instead. {0}", - e); + logger.Warn("Exception encountered trying to upload binds to stage. Attaching binds in payload instead. Exception: " + e.Message); + } + finally + { + ClearQueryRequestId(); } }
diff --git a/doc/StructuredTypes.md b/doc/StructuredTypes.md
new file mode 100644
index 000000000..bc45d98c9
--- /dev/null
+++ b/doc/StructuredTypes.md
@@ -0,0 +1,185 @@
+## Concept
+
+Snowflake structured types documentation is available here: [Snowflake Structured Types Documentation](https://docs.snowflake.com/en/sql-reference/data-types-structured).
+
+Snowflake offers a way to store structured types, which can be:
+- objects, e.g. ```OBJECT(city VARCHAR, state VARCHAR)```
+- arrays, e.g. ```ARRAY(NUMBER)```
+- maps, e.g. ```MAP(VARCHAR, VARCHAR)```
+
+The driver allows reading such structured objects and casting them into customer classes.
+
+**Note**: Currently, reading structured types is available only for the JSON result format.
+
+## Enabling the feature
+
+Currently, reading structured types is available only for the JSON result format, so make sure you are using it by running:
+```sql
+ALTER SESSION SET DOTNET_QUERY_RESULT_FORMAT = JSON;
+```
+
+The structured types feature is available starting with driver version v4.2.0.
+
+## Structured types vs semi-structured types
+
+The difference between structured and semi-structured types is that structured types contain type definitions for the given objects/arrays/maps.
+
+E.g. for a given object:
+```sql
+SELECT OBJECT_CONSTRUCT('city','San Mateo', 'state', 'CA')::OBJECT(city VARCHAR, state VARCHAR)
+```
+
+The part indicating the type of the object is `::OBJECT(city VARCHAR, state VARCHAR)`.
+This part of the definition is essential for structured types because it is used to convert the object into an instance of the customer class.
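+
+As a quick contrast before the details below, the following sketch (illustrative only; `Address` is a customer class described under "Handling objects" below) shows what the type definition changes:
+
+```csharp
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+// With the ::OBJECT(city VARCHAR, state VARCHAR) definition, the column can be
+// mapped to a customer class:
+var address = reader.GetObject<Address>(0);
+// Without it (the semi-structured variant shown next), the column can only be
+// read as a JSON string, e.g. via reader.GetString(0).
+```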
+
+Whereas the corresponding semi-structured type does not contain a detailed type definition, for instance:
+```sql
+SELECT OBJECT_CONSTRUCT('city','San Mateo', 'state', 'CA')::OBJECT
+```
+
+As a result, semi-structured types are returned only as a JSON string.
+
+## Handling objects
+
+You can construct structured objects by using an object constructor and providing type details:
+
+```sql
+SELECT OBJECT_CONSTRUCT('city','San Mateo', 'state', 'CA')::OBJECT(city VARCHAR, state VARCHAR)
+```
+
+You can read the object into your class by calling the `T SnowflakeDbReader.GetObject<T>(int ordinal)` method:
+
+```csharp
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+var address = reader.GetObject<Address>(0);
+```
+
+where `Address` is a customer class, e.g.
+```csharp
+public class Address
+{
+    public string city { get; set; }
+    public string state { get; set; }
+    public Zip zip { get; set; }
+}
+```
+
+There are a few possible ways of constructing a customer class object.
+The customer object (e.g. `Address`) can be created either:
+- by property order, which is the default method
+- by property names
+- by the constructor.
+
+### Creating objects by property order
+
+Creating objects by property order is the default construction method.
+Objects are created by the parameterless constructor, and then the n-th Snowflake object field is converted into the n-th customer object property, one by one.
+
+You can annotate your class with the `SnowflakeObject` annotation to make sure this creation method is chosen (though this is not necessary, since it is the default):
+```csharp
+[SnowflakeObject(ConstructionMethod = SnowflakeObjectConstructionMethod.PROPERTIES_ORDER)]
+public class Address
+{
+    public string city { get; set; }
+    public string state { get; set; }
+    public Zip zip { get; set; }
+}
+```
+
+If you would like to skip a customer property, use the `[SnowflakeColumn(IgnoreForPropertyOrder = true)]` annotation on that property.
+For instance, the annotation used in the following class definition causes `city` to be skipped when mapping the properties:
+```csharp
+public class Address
+{
+    [SnowflakeColumn(IgnoreForPropertyOrder = true)]
+    public string city { get; set; }
+    public string state { get; set; }
+    public Zip zip { get; set; }
+}
+```
+
+So, the first field from the database object is mapped to the `state` property, because `city` is skipped.
+
+### Creating objects by property names
+
+Using the `[SnowflakeObject(ConstructionMethod = SnowflakeObjectConstructionMethod.PROPERTIES_NAMES)]` annotation on the customer class enables the creation of objects by property names.
+In this creation method, objects are created by the parameterless constructor, and then each database object field is set on the property of the same name.
+It is crucial that database object field names are the same as customer property names; otherwise, a given database object field value is not set in the customer object.
+You can use the `SnowflakeColumn` annotation to rename a customer object property so that it matches the database object field name.
+
+In the example:
+
+```csharp
+[SnowflakeObject(ConstructionMethod = SnowflakeObjectConstructionMethod.PROPERTIES_NAMES)]
+public class Address
+{
+    [SnowflakeColumn(Name = "nearestCity")]
+    public string city { get; set; }
+    public string state { get; set; }
+    public Zip zip { get; set; }
+}
+```
+
+the database object field `nearestCity` is mapped to the `city` property of the `Address` class.
+
+### Creating objects by the constructor
+
+Using the `[SnowflakeObject(ConstructionMethod = SnowflakeObjectConstructionMethod.CONSTRUCTOR)]` annotation on the customer class enables the creation of objects by a constructor.
+In this creation method, an object with all its fields is created by a constructor.
+A constructor with exactly as many parameters as the database object has fields must exist, because that constructor is chosen to instantiate the customer object.
+Database object fields are mapped to customer object constructor parameters based on their order.
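+
+For instance, assuming the two-parameter `Address` class shown in the example just below, a read could look like this sketch (illustrative; the object's field count must match the constructor's parameter count):
+
+```csharp
+// Sketch for: SELECT OBJECT_CONSTRUCT('city','San Mateo', 'state', 'CA')::OBJECT(city VARCHAR, state VARCHAR)
+// 'San Mateo' maps to the first constructor parameter (city), 'CA' to the second (state).
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+var address = reader.GetObject<Address>(0);
+```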
+
+Example:
+```csharp
+[SnowflakeObject(ConstructionMethod = SnowflakeObjectConstructionMethod.CONSTRUCTOR)]
+public class Address
+{
+    private string _city;
+    private string _state;
+
+    public Address()
+    {
+    }
+
+    public Address(string city, string state)
+    {
+        _city = city;
+        _state = state;
+    }
+}
+```
+
+## Handling arrays
+
+You can construct structured arrays like this:
+
+```sql
+SELECT ARRAY_CONSTRUCT('a', 'b', 'c')::ARRAY(TEXT)
+```
+
+You can read such a structured array using the `T[] SnowflakeDbReader.GetArray<T>(int ordinal)` method to get an array of the specified type.
+
+```csharp
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+var array = reader.GetArray<string>(0);
+```
+
+## Handling maps
+
+You can construct structured maps like this:
+
+```sql
+SELECT OBJECT_CONSTRUCT('5','San Mateo', '8', 'CA', '13', '01-234')::MAP(INTEGER, VARCHAR)
+```
+
+**Note**: The only possible map key types are VARCHAR and NUMBER with scale 0.
+
+You can read a structured map using the `Dictionary<K, V> SnowflakeDbReader.GetMap<K, V>(int ordinal)` method to get a dictionary of the specified key and value types.
+
+```csharp
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+var map = reader.GetMap<int, string>(0);
+```
diff --git a/doc/VectorType.md b/doc/VectorType.md
new file mode 100644
index 000000000..fcf3cdaa4
--- /dev/null
+++ b/doc/VectorType.md
@@ -0,0 +1,18 @@
+# Vector type
+
+The vector type represents a fixed-size array of either integers or floats.
+Examples:
+- `[4, 5, 6]::VECTOR(INT, 3)` is a 3-element vector of integers
+- `[1.1, 2.2]::VECTOR(FLOAT, 2)` is a 2-element vector of floats
+
+You can read more about vectors here: [Vector data types](https://docs.snowflake.com/en/sql-reference/data-types-vector).
+
+The driver allows reading a vector column into an `int[]` or `float[]` array by calling the `T[] SnowflakeDbReader.GetArray<T>(int ordinal)` method with either the int or the float type.
+
+```csharp
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+int[] intVector = reader.GetArray<int>(0);
+float[] floatVector = reader.GetArray<float>(1);
+```
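+
+For context, a query producing the two columns read above could look like the following sketch (illustrative, reusing the vector examples from the top of this page):
+
+```csharp
+// Sketch: an INT vector at ordinal 0 and a FLOAT vector at ordinal 1.
+command.CommandText = "SELECT [4, 5, 6]::VECTOR(INT, 3), [1.1, 2.2]::VECTOR(FLOAT, 2)";
+var reader = (SnowflakeDbDataReader) command.ExecuteReader();
+Assert.IsTrue(reader.Read());
+int[] intVector = reader.GetArray<int>(0);       // [4, 5, 6]
+float[] floatVector = reader.GetArray<float>(1); // [1.1, 2.2]
+```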