diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b61f7a4c8..e12a0a6d6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,7 +31,7 @@ jobs: strategy: fail-fast: false matrix: - dotnet: ['net6.0', 'net7.0', 'net8.0'] + dotnet: ['net6.0', 'net7.0', 'net8.0', 'net9.0'] cloud_env: ['AZURE', 'GCP', 'AWS'] target_framework: ['netstandard2.0', 'netstandard2.1'] steps: @@ -43,6 +43,7 @@ jobs: dotnet-version: | 6.0.x 8.0.x + 9.0.x dotnet-quality: 'ga' - name: Setup Python uses: actions/setup-python@v5 @@ -109,6 +110,7 @@ jobs: dotnet-version: | 6.0.x 8.0.x + 9.0.x dotnet-quality: 'ga' - name: Setup Python uses: actions/setup-python@v5 @@ -163,7 +165,7 @@ jobs: strategy: fail-fast: false matrix: - dotnet: ['net6.0', 'net7.0', 'net8.0'] + dotnet: ['net6.0', 'net7.0', 'net8.0', 'net9.0'] cloud_env: ['AZURE', 'GCP', 'AWS'] target_framework: ['netstandard2.0', 'netstandard2.1'] steps: @@ -174,6 +176,7 @@ jobs: dotnet-version: | 6.0.x 8.0.x + 9.0.x dotnet-quality: 'ga' - name: Setup Python uses: actions/setup-python@v5 @@ -226,7 +229,7 @@ jobs: strategy: fail-fast: false matrix: - dotnet: ['net6.0', 'net7.0', 'net8.0'] + dotnet: ['net6.0', 'net7.0', 'net8.0', 'net9.0'] cloud_env: ['AZURE', 'GCP', 'AWS'] target_framework: ['netstandard2.0', 'netstandard2.1'] steps: @@ -237,6 +240,7 @@ jobs: dotnet-version: | 6.0.x 8.0.x + 9.0.x dotnet-quality: 'ga' - name: Setup Python uses: actions/setup-python@v5 diff --git a/Jenkinsfile b/Jenkinsfile index 2870cb1e5..df829df1c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -28,7 +28,7 @@ timestamps { string(name: 'branch', value: 'main'), string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT), string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH), - string(name: 'TARGET_DOCKER_TEST_IMAGE', value: 'dotnet-ubuntu204-net8'), + string(name: 'TARGET_DOCKER_TEST_IMAGE', value: 'dotnet-ubuntu204-net9'), string(name: 'parent_job', value: env.JOB_NAME), string(name: 'parent_build_number', 
value: env.BUILD_NUMBER) ] diff --git a/README.md b/README.md index 682475df3..12e52a056 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ [![NuGet](https://img.shields.io/nuget/v/Snowflake.Data.svg)](https://www.nuget.org/packages/Snowflake.Data/) [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) -The Snowflake .NET connector supports the the following .NET framework and libraries versions: +The Snowflake .NET connector supports the following .NET framework and libraries versions: - .NET Framework 4.6.2 - .NET Framework 4.7.1 @@ -14,6 +14,7 @@ The Snowflake .NET connector supports the the following .NET framework and libra - .NET 6.0 - .NET 7.0 - .NET 8.0 +- .NET 9.0 Disclaimer: While the connector targets netstandard2.0 and may work with versions in its [support matrix](https://learn.microsoft.com/en-us/dotnet/standard/net-standard?tabs=net-standard-2-0#select-net-standard-version), only the versions listed above are supported and tested by the connector diff --git a/Snowflake.Data.Tests/Snowflake.Data.Tests.csproj b/Snowflake.Data.Tests/Snowflake.Data.Tests.csproj index ce61f5904..105e5b2a1 100644 --- a/Snowflake.Data.Tests/Snowflake.Data.Tests.csproj +++ b/Snowflake.Data.Tests/Snowflake.Data.Tests.csproj @@ -1,7 +1,7 @@ - net6.0;net7.0;net8.0;net462;net471;net472;net48;net481 - net6.0;net7.0;net8.0; + net6.0;net7.0;net8.0;net9.0;net462;net471;net472;net48;net481 + net6.0;net7.0;net8.0;net9.0 Snowflake.Data.Tests Snowflake Connector for .NET Snowflake Computing, Inc diff --git a/Snowflake.Data.Tests/UnitTests/PutGetStageInfoTest.cs b/Snowflake.Data.Tests/UnitTests/PutGetStageInfoTest.cs new file mode 100644 index 000000000..074d95be5 --- /dev/null +++ b/Snowflake.Data.Tests/UnitTests/PutGetStageInfoTest.cs @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ + +using System.Collections.Generic; +using NUnit.Framework; +using Snowflake.Data.Core; +using Snowflake.Data.Core.FileTransfer; + +namespace Snowflake.Data.Tests.UnitTests +{ + [TestFixture] + public class PutGetStageInfoTest + { + [Test] + [TestCaseSource(nameof(TestCases))] + public void TestGcsRegionalUrl(string region, bool useRegionalUrl, string endPoint, string expectedGcsEndpoint) + { + // arrange + var stageInfo = CreateGcsStageInfo(region, useRegionalUrl, endPoint); + + // act + var gcsCustomEndpoint = stageInfo.GcsCustomEndpoint(); + + // assert + Assert.AreEqual(expectedGcsEndpoint, gcsCustomEndpoint); + } + + internal static IEnumerable TestCases() + { + yield return new object[] { "US-CENTRAL1", false, null, null }; + yield return new object[] { "US-CENTRAL1", false, "", null }; + yield return new object[] { "US-CENTRAL1", false, "null", null }; + yield return new object[] { "US-CENTRAL1", false, " ", null }; + yield return new object[] { "US-CENTRAL1", false, "example.com", "example.com" }; + yield return new object[] { "ME-CENTRAL2", false, null, "storage.me-central2.rep.googleapis.com" }; + yield return new object[] { "ME-CENTRAL2", true, null, "storage.me-central2.rep.googleapis.com" }; + yield return new object[] { "ME-CENTRAL2", true, "", "storage.me-central2.rep.googleapis.com" }; + yield return new object[] { "ME-CENTRAL2", true, " ", "storage.me-central2.rep.googleapis.com" }; + yield return new object[] { "ME-CENTRAL2", true, "example.com", "example.com" }; + yield return new object[] { "US-CENTRAL1", true, null, "storage.us-central1.rep.googleapis.com" }; + yield return new object[] { "US-CENTRAL1", true, "", "storage.us-central1.rep.googleapis.com" }; + yield return new object[] { "US-CENTRAL1", true, " ", "storage.us-central1.rep.googleapis.com" }; + yield return new object[] { "US-CENTRAL1", true, "null", "storage.us-central1.rep.googleapis.com" }; + yield return new object[] { "US-CENTRAL1", true, "example.com", "example.com" }; + 
} + + private PutGetStageInfo CreateGcsStageInfo(string region, bool useRegionalUrl, string endPoint) => + new PutGetStageInfo + { + locationType = SFRemoteStorageUtil.GCS_FS, + location = "some location", + path = "some path", + region = region, + storageAccount = "some storage account", + isClientSideEncrypted = true, + stageCredentials = new Dictionary(), + presignedUrl = "some pre-signed url", + endPoint = endPoint, + useRegionalUrl = useRegionalUrl + }; + } +} diff --git a/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs index 08b85a9b5..745f5eaeb 100644 --- a/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs @@ -21,7 +21,7 @@ namespace Snowflake.Data.Tests.UnitTests using Azure; using Azure.Storage.Blobs.Models; - [TestFixture] + [TestFixture, NonParallelizable] class SFAzureClientTest : SFBaseTest { // Mock data for file metadata @@ -377,5 +377,38 @@ public async Task TestDownloadFileAsync(HttpStatusCode httpStatusCode, ResultSta // Assert Assert.AreEqual(expectedResultStatus.ToString(), _fileMetadata.resultStatus); } + + [Test] + public void TestEncryptionMetadataReadingIsCaseInsensitive() + { + // arrange + var metadata = new Dictionary + { + { + "ENCRYPTIONDATA", + @"{ + ""ContentEncryptionIV"": ""initVector"", + ""WrappedContentKey"": { + ""EncryptedKey"": ""key"" + } + }" + }, + { "MATDESC", "description" }, + { "SFCDIGEST", "something"} + }; + var blobProperties = BlobsModelFactory.BlobProperties(metadata: metadata, contentLength: 10); + var mockBlobServiceClient = new Mock(); + _client = new SFSnowflakeAzureClient(_fileMetadata.stageInfo, mockBlobServiceClient.Object); + + // act + var fileHeader = _client.HandleFileHeaderResponse(ref _fileMetadata, blobProperties); + + // assert + Assert.AreEqual(ResultStatus.UPLOADED.ToString(), _fileMetadata.resultStatus); + Assert.AreEqual("something", fileHeader.digest); + 
Assert.AreEqual("initVector", fileHeader.encryptionMetadata.iv); + Assert.AreEqual("key", fileHeader.encryptionMetadata.key); + Assert.AreEqual("description", fileHeader.encryptionMetadata.matDesc); + } } } diff --git a/Snowflake.Data.Tests/UnitTests/SFEnvironmentTest.cs b/Snowflake.Data.Tests/UnitTests/SFEnvironmentTest.cs index 34aa60f2c..ccf352ac8 100644 --- a/Snowflake.Data.Tests/UnitTests/SFEnvironmentTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFEnvironmentTest.cs @@ -22,6 +22,8 @@ public void TestRuntimeExtraction() expectedVersion = "7.0"; #elif NET8_0 expectedVersion = "8.0"; +#elif NET9_0 + expectedVersion = "9.0"; #endif // Act diff --git a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs index d47742743..ed1257894 100644 --- a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs @@ -1,24 +1,25 @@ /* - * Copyright (c) 2012-2023 Snowflake Computing Inc. All rights reserved. + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. 
*/ using System; +using NUnit.Framework; +using Snowflake.Data.Core; +using Snowflake.Data.Core.FileTransfer.StorageClient; +using Snowflake.Data.Core.FileTransfer; +using System.Collections.Generic; +using System.Net; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Threading.Tasks; +using System.Threading; +using Snowflake.Data.Tests.Mock; +using Moq; namespace Snowflake.Data.Tests.UnitTests { - using NUnit.Framework; - using Snowflake.Data.Core; - using Snowflake.Data.Core.FileTransfer.StorageClient; - using Snowflake.Data.Core.FileTransfer; - using System.Collections.Generic; - using System.Net; - using System.IO; - using System.Threading.Tasks; - using System.Threading; - using Snowflake.Data.Tests.Mock; - using Moq; - - [TestFixture] + [TestFixture, NonParallelizable] class SFGCSClientTest : SFBaseTest { // Mock data for file metadata @@ -340,6 +341,75 @@ public async Task TestDownloadFileAsync(HttpStatusCode? httpStatusCode, ResultSt AssertForDownloadFileTests(expectedResultStatus); } + [Test] + [TestCase("us-central1", null, null, "https://storage.googleapis.com/mock-customer-stage/mock-id/tables/mock-key/")] + [TestCase("us-central1", "example.com", null, "https://example.com/mock-customer-stage/mock-id/tables/mock-key/")] + [TestCase("us-central1", "https://example.com", null, "https://example.com/mock-customer-stage/mock-id/tables/mock-key/")] + [TestCase("us-central1", null, true, "https://storage.us-central1.rep.googleapis.com/mock-customer-stage/mock-id/tables/mock-key/")] + [TestCase("me-central2", null, null, "https://storage.me-central2.rep.googleapis.com/mock-customer-stage/mock-id/tables/mock-key/")] + public void TestUseUriWithRegionsWhenNeeded(string region, string endPoint, bool useRegionalUrl, string expectedRequestUri) + { + var fileMetadata = new SFFileMetadata() + { + stageInfo = new PutGetStageInfo() + { + endPoint = endPoint, + location = Location, + locationType = SFRemoteStorageUtil.GCS_FS, + path = 
LocationPath, + presignedUrl = null, + region = region, + stageCredentials = _stageCredentials, + storageAccount = null, + useRegionalUrl = useRegionalUrl + } + }; + + // act + var uri = _client.FormBaseRequest(fileMetadata, "PUT").RequestUri.ToString(); + + // assert + Assert.AreEqual(expectedRequestUri, uri); + } + + [Test] + [TestCase("some-header-name", "SOME-HEADER-NAME")] + [TestCase("SOME-HEADER-NAME", "some-header-name")] + public void TestGcsHeadersAreCaseInsensitiveForHttpResponseMessage(string headerNameToAdd, string headerNameToGet) + { + // arrange + const string HeaderValue = "someValue"; + var responseMessage = new HttpResponseMessage( HttpStatusCode.OK ) {Content = new StringContent( "Response content" ) }; + responseMessage.Headers.Add(headerNameToAdd, HeaderValue); + + // act + var header = responseMessage.Headers.GetValues(headerNameToGet); + + // assert + Assert.NotNull(header); + Assert.AreEqual(1, header.Count()); + Assert.AreEqual(HeaderValue, header.First()); + } + + [Test] + [TestCase("some-header-name", "SOME-HEADER-NAME")] + [TestCase("SOME-HEADER-NAME", "some-header-name")] + public void TestGcsHeadersAreCaseInsensitiveForWebHeaderCollection(string headerNameToAdd, string headerNameToGet) + { + // arrange + const string HeaderValue = "someValue"; + var headers = new WebHeaderCollection(); + headers.Add(headerNameToAdd, HeaderValue); + + // act + var header = headers.GetValues(headerNameToGet); + + // assert + Assert.NotNull(header); + Assert.AreEqual(1, header.Count()); + Assert.AreEqual(HeaderValue, header.First()); + } + private void AssertForDownloadFileTests(ResultStatus expectedResultStatus) { if (expectedResultStatus == ResultStatus.DOWNLOADED) diff --git a/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs index 5432b0121..0f2de32a6 100644 --- a/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs @@ -1,27 +1,25 @@ /* - * Copyright 
(c) 2012-2023 Snowflake Computing Inc. All rights reserved. + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. */ using System; -using Amazon.S3.Encryption; +using NUnit.Framework; +using Snowflake.Data.Core; +using Snowflake.Data.Core.FileTransfer.StorageClient; +using Snowflake.Data.Core.FileTransfer; +using System.Collections.Generic; +using Amazon.S3; +using Snowflake.Data.Tests.Mock; +using System.Threading.Tasks; +using Amazon; +using System.Threading; +using System.IO; +using Moq; +using Amazon.S3.Model; namespace Snowflake.Data.Tests.UnitTests { - using NUnit.Framework; - using Snowflake.Data.Core; - using Snowflake.Data.Core.FileTransfer.StorageClient; - using Snowflake.Data.Core.FileTransfer; - using System.Collections.Generic; - using Amazon.S3; - using Snowflake.Data.Tests.Mock; - using System.Threading.Tasks; - using Amazon; - using System.Threading; - using System.IO; - using Moq; - using Amazon.S3.Model; - - [TestFixture] + [TestFixture, NonParallelizable] class SFS3ClientTest : SFBaseTest { // Mock data for file metadata @@ -320,6 +318,29 @@ public async Task TestDownloadFileAsync(string awsStatusCode, ResultStatus expec AssertForDownloadFileTests(expectedResultStatus); } + [Test] + public void TestEncryptionMetadataReadingIsCaseInsensitive() + { + // arrange + var mockAmazonS3Client = new Mock(AwsKeyId, AwsSecretKey, AwsToken, _clientConfig); + _client = new SFS3Client(_fileMetadata.stageInfo, MaxRetry, Parallel, _proxyCredentials, mockAmazonS3Client.Object); + var response = new GetObjectResponse(); + response.Metadata.Add(SFS3Client.AMZ_IV.ToUpper(), "initVector"); + response.Metadata.Add(SFS3Client.AMZ_KEY.ToUpper(), "key"); + response.Metadata.Add(SFS3Client.AMZ_MATDESC.ToUpper(), "description"); + response.Metadata.Add(SFS3Client.SFC_DIGEST.ToUpper(), "something"); + + // act + var fileHeader = _client.HandleFileHeaderResponse(ref _fileMetadata, response); + + // assert + Assert.AreEqual(ResultStatus.UPLOADED.ToString(), 
_fileMetadata.resultStatus); + Assert.AreEqual("something", fileHeader.digest); + Assert.AreEqual("initVector", fileHeader.encryptionMetadata.iv); + Assert.AreEqual("key", fileHeader.encryptionMetadata.key); + Assert.AreEqual("description", fileHeader.encryptionMetadata.matDesc); + } + private void AssertForDownloadFileTests(ResultStatus expectedResultStatus) { if (expectedResultStatus == ResultStatus.DOWNLOADED) diff --git a/Snowflake.Data.Tests/UnitTests/SFStatementTest.cs b/Snowflake.Data.Tests/UnitTests/SFStatementTest.cs index 3f131e924..5d27c6add 100755 --- a/Snowflake.Data.Tests/UnitTests/SFStatementTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFStatementTest.cs @@ -1,13 +1,15 @@ /* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. + * Copyright (c) 2012-2024 Snowflake Computing Inc. All rights reserved. */ +using System.Threading; +using Snowflake.Data.Client; +using Snowflake.Data.Core; +using NUnit.Framework; +using System; + namespace Snowflake.Data.Tests.UnitTests { - using Snowflake.Data.Core; - using NUnit.Framework; - using System; - /** * Mock rest request test */ @@ -191,5 +193,25 @@ public void TestIsAnError(QueryStatus status, bool expectedResult) { Assert.AreEqual(expectedResult, QueryStatusExtensions.IsAnError(status)); } + + [Test] + public void TestHandleNullDataForFailedResponse() + { + // arrange + var response = new QueryExecResponse + { + success = false, + code = 500, + message = "internal error" + }; + var session = new SFSession("account=myAccount;password=myPassword;user=myUser;db=myDB", null); + var statement = new SFStatement(session); + + // act + var thrown = Assert.Throws(() => statement.BuildResultSet(response, CancellationToken.None)); + + // assert + Assert.AreEqual("Error: internal error SqlState: , VendorCode: 500, QueryId: ", thrown.Message); + } } } diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs index 
f56baf2fa..e5d0ac139 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs @@ -10,6 +10,8 @@ using Newtonsoft.Json; using Snowflake.Data.Log; using System.Net; +using Google.Apis.Storage.v1; +using Google.Cloud.Storage.V1; namespace Snowflake.Data.Core.FileTransfer.StorageClient { @@ -52,6 +54,8 @@ class SFGCSClient : ISFRemoteStorageClient /// private WebRequest _customWebRequest = null; + private static readonly string[] s_scopes = new[] { StorageService.Scope.DevstorageFullControl }; + /// /// GCS client with access token. /// @@ -65,15 +69,32 @@ public SFGCSClient(PutGetStageInfo stageInfo) Logger.Debug("Constructing client using access token"); AccessToken = accessToken; GoogleCredential creds = GoogleCredential.FromAccessToken(accessToken, null); - StorageClient = Google.Cloud.Storage.V1.StorageClient.Create(creds); + var storageClientBuilder = new StorageClientBuilder + { + Credential = creds?.CreateScoped(s_scopes), + EncryptionKey = null + }; + StorageClient = BuildStorageClient(storageClientBuilder, stageInfo); } else { Logger.Info("No access token received from GS, constructing anonymous client with no encryption support"); - StorageClient = Google.Cloud.Storage.V1.StorageClient.CreateUnauthenticated(); + var storageClientBuilder = new StorageClientBuilder + { + UnauthenticatedAccess = true + }; + StorageClient = BuildStorageClient(storageClientBuilder, stageInfo); } } + private Google.Cloud.Storage.V1.StorageClient BuildStorageClient(StorageClientBuilder builder, PutGetStageInfo stageInfo) + { + var gcsCustomEndpoint = stageInfo.GcsCustomEndpoint(); + if (!string.IsNullOrEmpty(gcsCustomEndpoint)) + builder.BaseUri = gcsCustomEndpoint; + return builder.Build(); + } + internal void SetCustomWebRequest(WebRequest mockWebRequest) { _customWebRequest = mockWebRequest; @@ -112,7 +133,7 @@ public RemoteLocation ExtractBucketNameAndPath(string stageLocation) internal 
WebRequest FormBaseRequest(SFFileMetadata fileMetadata, string method) { string url = string.IsNullOrEmpty(fileMetadata.presignedUrl) ? - generateFileURL(fileMetadata.stageInfo.location, fileMetadata.RemoteFileName()) : + generateFileURL(fileMetadata.stageInfo, fileMetadata.RemoteFileName()) : fileMetadata.presignedUrl; WebRequest request = WebRequest.Create(url); @@ -219,19 +240,26 @@ public async Task GetFileHeaderAsync(SFFileMetadata fileMetadata, Ca return null; } - /// - /// Generate the file URL. - /// - /// The GCS file metadata. - /// The GCS file metadata. - internal string generateFileURL(string stageLocation, string fileName) + internal string generateFileURL(PutGetStageInfo stageInfo, string fileName) { - var gcsLocation = ExtractBucketNameAndPath(stageLocation); + var storageHostPath = ExtractStorageHostPath(stageInfo); + var gcsLocation = ExtractBucketNameAndPath(stageInfo.location); var fullFilePath = gcsLocation.key + fileName; - var link = "https://storage.googleapis.com/" + gcsLocation.bucket + "/" + fullFilePath; + var link = storageHostPath + gcsLocation.bucket + "/" + fullFilePath; return link; } + private string ExtractStorageHostPath(PutGetStageInfo stageInfo) + { + var gcsEndpoint = stageInfo.GcsCustomEndpoint(); + var storageHostPath = string.IsNullOrEmpty(gcsEndpoint) ? "https://storage.googleapis.com/" : gcsEndpoint; + if (!storageHostPath.StartsWith("https://", StringComparison.OrdinalIgnoreCase)) + storageHostPath = "https://" + storageHostPath; + if (!storageHostPath.EndsWith("/")) + storageHostPath = storageHostPath + "/"; + return storageHostPath; + } + /// /// Upload the file to the GCS location. 
/// diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs index ea0eb3fd0..524dc23c1 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs @@ -9,6 +9,7 @@ using Snowflake.Data.Log; using System; using System.IO; +using System.Linq; using System.Net; using System.Threading; using System.Threading.Tasks; @@ -266,26 +267,38 @@ private GetObjectRequest GetFileHeaderRequest(ref AmazonS3Client client, SFFileM /// The S3 file metadata. /// The Amazon S3 response. /// The file header of the S3 file. - private FileHeader HandleFileHeaderResponse(ref SFFileMetadata fileMetadata, GetObjectResponse response) + internal FileHeader HandleFileHeaderResponse(ref SFFileMetadata fileMetadata, GetObjectResponse response) { // Update the result status of the file metadata fileMetadata.resultStatus = ResultStatus.UPLOADED.ToString(); SFEncryptionMetadata encryptionMetadata = new SFEncryptionMetadata { - iv = response.Metadata[AMZ_IV], - key = response.Metadata[AMZ_KEY], - matDesc = response.Metadata[AMZ_MATDESC] + iv = GetMetadataCaseInsensitive(response.Metadata, AMZ_IV), + key = GetMetadataCaseInsensitive(response.Metadata, AMZ_KEY), + matDesc = GetMetadataCaseInsensitive(response.Metadata, AMZ_MATDESC) }; return new FileHeader { - digest = response.Metadata[SFC_DIGEST], + digest = GetMetadataCaseInsensitive(response.Metadata, SFC_DIGEST), contentLength = response.ContentLength, encryptionMetadata = encryptionMetadata }; } + private string GetMetadataCaseInsensitive(MetadataCollection metadataCollection, string metadataKey) + { + var value = metadataCollection[metadataKey]; + if (value != null) + return value; + if (string.IsNullOrEmpty(metadataKey)) + return null; + var keysCaseInsensitive = metadataCollection.Keys + .Where(key => $"x-amz-meta-{metadataKey}".Equals(key, StringComparison.OrdinalIgnoreCase)); + return 
keysCaseInsensitive.Any() ? metadataCollection[keysCaseInsensitive.First()] : null; + } + /// /// Set the client configuration common to both client with and without client-side /// encryption. diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs index 98c2694cb..d13dc01b9 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs @@ -7,6 +7,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Linq; using Azure; using Azure.Storage.Blobs.Models; using Newtonsoft.Json; @@ -154,30 +155,48 @@ public async Task GetFileHeaderAsync(SFFileMetadata fileMetadata, Ca /// The S3 file metadata. /// The Amazon S3 response. /// The file header of the S3 file. - private FileHeader HandleFileHeaderResponse(ref SFFileMetadata fileMetadata, BlobProperties response) + internal FileHeader HandleFileHeaderResponse(ref SFFileMetadata fileMetadata, BlobProperties response) { fileMetadata.resultStatus = ResultStatus.UPLOADED.ToString(); SFEncryptionMetadata encryptionMetadata = null; - if (response.Metadata.TryGetValue("encryptiondata", out var encryptionDataStr)) + if (TryGetMetadataValueCaseInsensitive(response, "encryptiondata", out var encryptionDataStr)) { dynamic encryptionData = JsonConvert.DeserializeObject(encryptionDataStr); encryptionMetadata = new SFEncryptionMetadata { iv = encryptionData["ContentEncryptionIV"], key = encryptionData.WrappedContentKey["EncryptedKey"], - matDesc = response.Metadata["matdesc"] + matDesc = GetMetadataValueCaseInsensitive(response, "matdesc") }; } return new FileHeader { - digest = response.Metadata["sfcdigest"], + digest = GetMetadataValueCaseInsensitive(response, "sfcdigest"), contentLength = response.ContentLength, encryptionMetadata = encryptionMetadata }; } + private bool 
TryGetMetadataValueCaseInsensitive(BlobProperties properties, string metadataKey, out string metadataValue) + { + if (properties.Metadata.TryGetValue(metadataKey, out metadataValue)) + return true; + if (string.IsNullOrEmpty(metadataKey)) + return false; + var keysCaseInsensitive = properties.Metadata.Keys + .Where(key => metadataKey.Equals(key, StringComparison.OrdinalIgnoreCase)); + return keysCaseInsensitive.Any() ? properties.Metadata.TryGetValue(keysCaseInsensitive.First(), out metadataValue) : false; + } + + private string GetMetadataValueCaseInsensitive(BlobProperties properties, string metadataKey) + { + if (TryGetMetadataValueCaseInsensitive(properties, metadataKey, out var metadataValue)) + return metadataValue; + throw new KeyNotFoundException($"The given key '{metadataKey}' was not present in the dictionary."); + } + /// /// Upload the file to the Azure location. /// diff --git a/Snowflake.Data/Core/RestResponse.cs b/Snowflake.Data/Core/RestResponse.cs index 64275fa42..b490ddcdc 100755 --- a/Snowflake.Data/Core/RestResponse.cs +++ b/Snowflake.Data/Core/RestResponse.cs @@ -8,6 +8,7 @@ using Newtonsoft.Json.Converters; using Newtonsoft.Json.Linq; using Snowflake.Data.Client; +using Snowflake.Data.Core.FileTransfer; namespace Snowflake.Data.Core { @@ -439,6 +440,22 @@ internal class PutGetStageInfo [JsonProperty(PropertyName = "endPoint", NullValueHandling = NullValueHandling.Ignore)] internal string endPoint { get; set; } + + [JsonProperty(PropertyName = "useRegionalUrl", NullValueHandling = NullValueHandling.Ignore)] + internal bool useRegionalUrl { get; set; } + + private const string GcsRegionMeCentral2 = "me-central2"; + + internal string GcsCustomEndpoint() + { + if (!(locationType ?? 
string.Empty).Equals(SFRemoteStorageUtil.GCS_FS, StringComparison.OrdinalIgnoreCase)) + return null; + if (!string.IsNullOrWhiteSpace(endPoint) && endPoint != "null") + return endPoint; + if (GcsRegionMeCentral2.Equals(region, StringComparison.OrdinalIgnoreCase) || useRegionalUrl) + return $"storage.{region.ToLower()}.rep.googleapis.com"; + return null; + } } internal class PutGetEncryptionMaterial diff --git a/Snowflake.Data/Core/SFStatement.cs b/Snowflake.Data/Core/SFStatement.cs index 146a10130..7dac1c600 100644 --- a/Snowflake.Data/Core/SFStatement.cs +++ b/Snowflake.Data/Core/SFStatement.cs @@ -284,7 +284,7 @@ private void CleanUpCancellationTokenSources() } } - private SFBaseResultSet BuildResultSet(QueryExecResponse response, CancellationToken cancellationToken) + internal SFBaseResultSet BuildResultSet(QueryExecResponse response, CancellationToken cancellationToken) { if ((response.data != null) && (response.data.queryId != null)) { @@ -308,8 +308,8 @@ private SFBaseResultSet BuildResultSet(QueryExecResponse response, CancellationT } } - throw new SnowflakeDbException(response.data.sqlState, - response.code, response.message, response.data.queryId); + throw new SnowflakeDbException(response.data?.sqlState, + response.code, response.message, response.data?.queryId); } private void SetTimeout(int timeout) diff --git a/Snowflake.Data/Core/Tools/Diagnostics.cs b/Snowflake.Data/Core/Tools/Diagnostics.cs index 0e9f5b0dd..22d37daec 100644 --- a/Snowflake.Data/Core/Tools/Diagnostics.cs +++ b/Snowflake.Data/Core/Tools/Diagnostics.cs @@ -1,5 +1,4 @@ using System; -using System.Diagnostics; using System.Reflection; using System.Runtime; using System.Runtime.InteropServices; @@ -18,22 +17,31 @@ internal class Diagnostics private static string GetDiagnosticInfo() { StringBuilder info = new StringBuilder("System Diagnostics:\n"); - info.AppendLine($"{"OS", PadRight}: {OsName()}"); - info.AppendLine($"{"OS Description", PadRight}: {RuntimeInformation.OSDescription}"); - 
info.AppendLine($"{"OS Architecture", PadRight}: {RuntimeInformation.OSArchitecture}"); - info.AppendLine($"{"OS Version", PadRight}: {Environment.OSVersion}"); - info.AppendLine($"{"OS x64", PadRight}: {Environment.Is64BitOperatingSystem}"); - info.AppendLine($"{"Processor Architecture", PadRight}: {RuntimeInformation.ProcessArchitecture}"); - info.AppendLine($"{"Processor Count", PadRight}: {Environment.ProcessorCount}"); - info.AppendLine($"{".NET Framework", PadRight}: {RuntimeInformation.FrameworkDescription}"); - info.AppendLine($"{"CLR Runtime Version", PadRight}: {Environment.Version}"); - info.AppendLine($"{"App x64", PadRight}: {Environment.Is64BitProcess}"); - info.AppendLine($"{"GC Server Mode", PadRight}: {GCSettings.IsServerGC}"); - info.AppendLine($"{"GC LOH Compaction Mode", PadRight}: {GCSettings.LargeObjectHeapCompactionMode}"); - info.AppendLine($"{"GC Latency Mode", PadRight}: {GCSettings.LatencyMode}"); - info.AppendLine($"{"GC Total Memory", PadRight}: {GC.GetTotalMemory(false)}"); - AppendAssemblyInfo(info, Assembly.GetEntryAssembly(), "App"); - AppendAssemblyInfo(info, Assembly.GetExecutingAssembly(), "Driver"); + try + { + info.AppendLine($"{"OS",PadRight}: {OsName()}"); + info.AppendLine($"{"OS Description",PadRight}: {RuntimeInformation.OSDescription}"); + info.AppendLine($"{"OS Architecture",PadRight}: {RuntimeInformation.OSArchitecture}"); + info.AppendLine($"{"OS Version",PadRight}: {Environment.OSVersion}"); + info.AppendLine($"{"OS x64",PadRight}: {Environment.Is64BitOperatingSystem}"); + info.AppendLine($"{"Processor Architecture",PadRight}: {RuntimeInformation.ProcessArchitecture}"); + info.AppendLine($"{"Processor Count",PadRight}: {Environment.ProcessorCount}"); + info.AppendLine($"{".NET Framework",PadRight}: {RuntimeInformation.FrameworkDescription}"); + info.AppendLine($"{"CLR Runtime Version",PadRight}: {Environment.Version}"); + info.AppendLine($"{"App x64",PadRight}: {Environment.Is64BitProcess}"); + info.AppendLine($"{"GC 
Server Mode",PadRight}: {GCSettings.IsServerGC}"); + info.AppendLine($"{"GC LOH Compaction Mode",PadRight}: {GCSettings.LargeObjectHeapCompactionMode}"); + info.AppendLine($"{"GC Latency Mode",PadRight}: {GCSettings.LatencyMode}"); + info.AppendLine($"{"GC Total Memory",PadRight}: {GC.GetTotalMemory(false)}"); + AppendAssemblyInfo(info, Assembly.GetEntryAssembly(), "App"); + AppendAssemblyInfo(info, Assembly.GetExecutingAssembly(), "Driver"); + } + catch (Exception exception) + { + var errorMessage = $"Error caught while collecting diagnostic info: {exception.Message}"; + s_logger.Error(errorMessage, exception); + info.AppendLine(errorMessage); + } return info.ToString(); } @@ -41,10 +49,8 @@ private static void AppendAssemblyInfo(StringBuilder info, Assembly assembly, st { if (assembly != null) { - var assemblyVersion = FileVersionInfo.GetVersionInfo(assembly.Location); - info.AppendLine($"{assemblyTag + " Name", PadRight}: {assemblyVersion.InternalName}"); - info.AppendLine($"{assemblyTag + " File", PadRight}: {assemblyVersion.FileName}"); - info.AppendLine($"{assemblyTag + " Version", PadRight}: {assemblyVersion.FileVersion}"); + info.AppendLine($"{assemblyTag + "Name",PadRight}: {assembly.GetName().Name}"); + info.AppendLine($"{assemblyTag + "Version",PadRight}: {assembly.GetName().Version}"); } } diff --git a/ci/_init.sh b/ci/_init.sh index 1b8314100..0df62eb9e 100755 --- a/ci/_init.sh +++ b/ci/_init.sh @@ -21,11 +21,11 @@ BUILD_IMAGE_VERSION=1 TEST_IMAGE_VERSION=1 declare -A BUILD_IMAGE_NAMES=( - [$DRIVER_NAME-ubuntu204-net8]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-ubuntu204-net8-build:$BUILD_IMAGE_VERSION + [$DRIVER_NAME-ubuntu204-net9]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-ubuntu204-net9-build:$BUILD_IMAGE_VERSION ) export BUILD_IMAGE_NAMES declare -A TEST_IMAGE_NAMES=( - [$DRIVER_NAME-ubuntu204-net8]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-ubuntu204-net8-test:$TEST_IMAGE_VERSION + 
[$DRIVER_NAME-ubuntu204-net9]=$DOCKER_REGISTRY_NAME/client-$DRIVER_NAME-ubuntu204-net9-test:$TEST_IMAGE_VERSION ) export TEST_IMAGE_NAMES diff --git a/ci/image/Dockerfile.dotnet-ubuntu204-net8-build b/ci/image/Dockerfile.dotnet-ubuntu204-net8-build index a50523142..c23822539 100644 --- a/ci/image/Dockerfile.dotnet-ubuntu204-net8-build +++ b/ci/image/Dockerfile.dotnet-ubuntu204-net8-build @@ -39,7 +39,7 @@ RUN apt-get install -y dotnet-sdk-8.0 # gosu RUN wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/1.14/gosu-$(dpkg --print-architecture)" && \ -chmod +x /usr/local/bin/gosu && \ +chmod +x /usr/local/bin/gosu # clean up RUN apt-get clean diff --git a/ci/image/Dockerfile.dotnet-ubuntu204-net9-build b/ci/image/Dockerfile.dotnet-ubuntu204-net9-build new file mode 100644 index 000000000..011f20d6c --- /dev/null +++ b/ci/image/Dockerfile.dotnet-ubuntu204-net9-build @@ -0,0 +1,56 @@ +# Use the official Ubuntu base image from Docker Hub +FROM ubuntu:20.04 + +USER root +WORKDIR / + +ENV DEBIAN_FRONTEND noninteractive + +# create dir for workspace +RUN mkdir -p /home/user +RUN chmod 777 /home/user + +# Update packages and install any desired dependencies +RUN apt-get update +RUN apt-get install -y +RUN apt-get install -y wget +RUN apt-get install -y apt-transport-https +RUN apt-get install -y build-essential +RUN apt-get install -y libpng-dev +RUN apt-get install -y libtiff-dev +RUN apt-get install -y libjpeg-dev +RUN apt-get install -y libx11-dev +RUN apt-get install -y libgl1-mesa-dev +RUN apt-get install -y libglu1-mesa-dev +RUN apt-get install -y gcc +RUN wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb +RUN dpkg -i packages-microsoft-prod.deb + +# install basic tools +RUN apt-get install -y git +RUN apt-get install -y zstd +RUN apt-get install -y jq + +# Install .NET SDK 9.0 +RUN apt-get update +RUN apt-get install -y dotnet-sdk-6.0 +RUN apt-get install -y dotnet-sdk-8.0 +RUN 
apt-get install -y dotnet-sdk-9.0 + +# gosu +RUN wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/1.14/gosu-$(dpkg --print-architecture)" && \ +chmod +x /usr/local/bin/gosu + +# clean up +RUN apt-get clean +RUN rm -rf /var/lib/apt/lists/* + +# workspace +RUN mkdir -p /home/user && \ +chmod 777 /home/user +WORKDIR /home/user + +# entry point +COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/image/Dockerfile.dotnet-ubuntu204-net9-test b/ci/image/Dockerfile.dotnet-ubuntu204-net9-test new file mode 100644 index 000000000..b4dffdb60 --- /dev/null +++ b/ci/image/Dockerfile.dotnet-ubuntu204-net9-test @@ -0,0 +1 @@ +FROM nexus.int.snowflakecomputing.com:8086/docker/client-dotnet-ubuntu204-net9-build:1 diff --git a/doc/CertficateValidation.md b/doc/CertficateValidation.md index 205049d98..735239f04 100644 --- a/doc/CertficateValidation.md +++ b/doc/CertficateValidation.md @@ -66,6 +66,57 @@ Example output: Please repeat the same for all the Snowflake-related endpoints from step 1. +For Windows if you do not wish to download additional tools, you can also use the existing Powershell facility. Please find the below Powershell script as a simplistic example of a possible approach. In this example, you would put below contents into `checkCrl.ps1` script: +```ps +if ( $($args.Count) -ne 1 ) { + Write-Output "Please use the full name of your Snowflake account as an argument." 
+ Write-Output "Example: powershell .\checkCrl.ps1 xy12345.eu-central-1.snowflakecomputing.com" + exit 1 +} +$sfaccount = $args[0] +$Connection = [System.Net.HttpWebRequest]::Create('https://' + $sfaccount) +$Response = $Connection.GetResponse() +$Response.Dispose() +$Certificate = $Connection.ServicePoint.Certificate +$Chain = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Chain +$Chain.build($Certificate) +$Chain.ChainElements.Certificate | % {set-content -value $($_.Export([Security.Cryptography.X509Certificates.X509ContentType]::Cert)) -encoding byte -path "$pwd\$($_.Thumbprint).sf.cer"} +Get-ChildItem *.sf.cer | ForEach-Object { certutil $_ | Select-String -Pattern "Subject:" -Context 1 ; certutil $_ | Select-String -Pattern "Distribution Point Name" -Context 2 } +Remove-Item *.sf.cer +``` + +After saving it, you can run it with specifying your Snowflake account's full name. An example execution and output, for a Snowflake account located in GCP US Central region: +```shell +c:\temp>powershell .\checkCrl.ps1 xy12345.us-central1.gcp.snowflakecomputing.com +True + + +> Subject: + CN=DigiCert Global G2 TLS RSA SHA256 2020 CA1 + CRL Distribution Points + [1]CRL Distribution Point +> Distribution Point Name: + Full Name: + URL=http://crl3.digicert.com/DigiCertGlobalRootG2.crl + +> Subject: + CN=*.us-central1.gcp.snowflakecomputing.com + CRL Distribution Points + [1]CRL Distribution Point +> Distribution Point Name: + Full Name: + URL=http://crl3.digicert.com/DigiCertGlobalG2TLSRSASHA2562020CA1-1.crl + [2]CRL Distribution Point +> Distribution Point Name: + Full Name: + URL=http://crl4.digicert.com/DigiCertGlobalG2TLSRSASHA2562020CA1-1.crl + +> Subject: + CN=DigiCert Global Root G2 +``` + +Look for values of `URL` fields under `Distribution Point Name` sections. + 3. 
**Ensure (or work with your systems / network / cloud team to ensure) that the CRL endpoints from step 2 are reachable _over port 80_ from the same host/network on which your application — the application using the Snowflake .NET driver — is running.** If your network includes any proxies through which the connection is sent, make sure those proxies allow connectivity to the CRL endpoints over port 80.