style: Run dotnet format
github-actions[bot] committed Aug 5, 2024
1 parent 50623a4 commit 6773f2b
Showing 21 changed files with 177 additions and 177 deletions.
@@ -34,7 +34,7 @@ public async Task<bool> DeleteAsync(
CancellationToken cancellationToken = default)
{
ids = ids ?? throw new ArgumentNullException(nameof(ids));

throw new NotImplementedException();
// foreach (var id in ids)
// {
@@ -63,7 +63,7 @@ public async Task<VectorSearchResponse> SearchAsync(
{
request = request ?? throw new ArgumentNullException(nameof(request));
settings ??= new VectorSearchSettings();

throw new NotImplementedException();
// var response = await client.SearchAsync<MyDoc>(s => s
// .Index("my_index")
20 changes: 10 additions & 10 deletions src/Databases/IntegrationTests/DatabaseTests.Configure.cs
@@ -137,18 +137,18 @@ private static async Task<DatabaseTestEnvironment> StartEnvironmentForAsync(Supp
VectorDatabase = new DuckDbVectorDatabase(store)
};
case SupportedDatabase.Elasticsearch:
{
var container = new ElasticsearchBuilder().Build();
{
var container = new ElasticsearchBuilder().Build();

await container.StartAsync(cancellationToken);
await container.StartAsync(cancellationToken);

var client = new ElasticsearchClient(new Uri($"http://localhost:{container.GetMappedPublicPort(9200)}"));
return new DatabaseTestEnvironment
{
VectorDatabase = new ElasticsearchVectorDatabase(client),
Container = container,
};
}
var client = new ElasticsearchClient(new Uri($"http://localhost:{container.GetMappedPublicPort(9200)}"));
return new DatabaseTestEnvironment
{
VectorDatabase = new ElasticsearchVectorDatabase(client),
Container = container,
};
}
case SupportedDatabase.Milvus:
{
var network = new NetworkBuilder()
4 changes: 2 additions & 2 deletions src/Helpers/GenerateDocs/Program.cs
@@ -7,10 +7,10 @@
{
var folder = Path.GetFileName(Path.GetDirectoryName(path) ?? string.Empty)?.Replace("LangChain.Samples.", string.Empty);
var code = await File.ReadAllTextAsync(path);

var newDir = Path.Combine(solutionDirectory, "docs", "samples");
Directory.CreateDirectory(newDir);

var newPath = Path.Combine(newDir, $"{folder}.md");
await File.WriteAllTextAsync(newPath, $@"```csharp
{code}
10 changes: 5 additions & 5 deletions src/Meta/test/BaseTests.cs
@@ -26,7 +26,7 @@ public async Task FiveRandomWords(ProviderType providerType)
var answer = await llm.GenerateAsync(
request: "Answer me five random words",
cancellationToken: CancellationToken.None).ConfigureAwait(false);

Console.WriteLine($"LLM answer: {answer}"); // The cloaked figure.
Console.WriteLine($"LLM usage: {llm.Usage}"); // Print usage and price

@@ -47,7 +47,7 @@ public async Task Streaming(ProviderType providerType)
llm.PromptSent += (_, prompt) => Console.WriteLine($"Prompt: {prompt}");
llm.PartialResponseGenerated += (_, delta) => Console.WriteLine(delta);
llm.CompletedResponseGenerated += (_, prompt) => Console.WriteLine($"Completed response: {prompt}");

llm.Settings = new ChatSettings
{
UseStreaming = true,
@@ -57,7 +57,7 @@ public async Task Streaming(ProviderType providerType)

response.LastMessageContent.Should().NotBeNull();
}

[TestCase(ProviderType.OpenAi)]
//[TestCase(ProviderType.Anyscale)]
//[TestCase(ProviderType.Together)]
@@ -85,7 +85,7 @@ public async Task SimpleChain(ProviderType providerType)

Console.WriteLine(result.Value["text"]);
}

[TestCase(ProviderType.OpenAi)]
//[TestCase(ProviderType.Anyscale)]
[TestCase(ProviderType.Together)]
@@ -114,7 +114,7 @@ public async Task Tools_Weather(ProviderType providerType)

Console.WriteLine(response.Messages.AsHistory());
}

[TestCase(ProviderType.OpenAi)]
//[TestCase(ProviderType.Anyscale)]
//[TestCase(ProviderType.Together)]
4 changes: 2 additions & 2 deletions src/Meta/test/BookStoreService.cs
@@ -25,9 +25,9 @@ public Task<List<GetAuthorBook>> GetAuthorBooksAsync(string authorName, Cancella
{
return Task.FromResult(new List<GetAuthorBook>([
new GetAuthorBook
{ Title = "Five point someone", Description = "This book is about 3 college friends" },
{ Title = "Five point someone", Description = "This book is about 3 college friends" },
new GetAuthorBook
{ Title = "Two States", Description = "This book is about intercast marriage in India" }
{ Title = "Two States", Description = "This book is about intercast marriage in India" }
]));
}

212 changes: 106 additions & 106 deletions src/Meta/test/Helpers.cs
@@ -43,7 +43,7 @@ public static (IChatModel ChatModel, IEmbeddingModel EmbeddingModel) GetModels(P
throw new InconclusiveException("TOGETHER_API_KEY is not set"));
var llm = new TogetherAiModel(provider, id: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo");
var embeddings = new OpenAiEmbeddingModel(provider, id: "togethercomputer/m2-bert-80M-2k-retrieval");

return (llm, embeddings);
}
case ProviderType.Anyscale:
@@ -77,122 +77,122 @@ public static (IChatModel ChatModel, IEmbeddingModel EmbeddingModel) GetModels(P
return (llm, embeddings);
}
case ProviderType.OpenRouter:
{
var provider = new OpenRouterProvider(
apiKey: Environment.GetEnvironmentVariable("OPENROUTER_API_KEY") ??
throw new InconclusiveException("OPENROUTER_API_KEY is not set"));
var llm = new Providers.OpenRouter.Predefined.OpenAiGpt4OModel(provider);

// Use OpenAI embeddings for now because OpenRouter doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
{
var provider = new OpenRouterProvider(
apiKey: Environment.GetEnvironmentVariable("OPENROUTER_API_KEY") ??
throw new InconclusiveException("OPENROUTER_API_KEY is not set"));
var llm = new Providers.OpenRouter.Predefined.OpenAiGpt4OModel(provider);

// Use OpenAI embeddings for now because OpenRouter doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
case ProviderType.DeepInfra:
{
var provider = new DeepInfraProvider(
apiKey: Environment.GetEnvironmentVariable("DEEPINFRA_API_KEY") ??
throw new InconclusiveException("DEEPINFRA_API_KEY is not set"));
var llm = new Providers.DeepInfra.Predefined.MetaLlama318BInstructModel(provider);

// Use OpenAI embeddings for now because DeepInfra doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
{
var provider = new DeepInfraProvider(
apiKey: Environment.GetEnvironmentVariable("DEEPINFRA_API_KEY") ??
throw new InconclusiveException("DEEPINFRA_API_KEY is not set"));
var llm = new Providers.DeepInfra.Predefined.MetaLlama318BInstructModel(provider);

// Use OpenAI embeddings for now because DeepInfra doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
case ProviderType.Google:
{
var provider = new GoogleProvider(
apiKey: Environment.GetEnvironmentVariable("GOOGLE_API_KEY") ??
throw new InconclusiveException("GOOGLE_API_KEY is not set"),
httpClient: new HttpClient());
var llm = new GeminiProModel(provider);

// Use OpenAI embeddings for now because Google doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
{
var provider = new GoogleProvider(
apiKey: Environment.GetEnvironmentVariable("GOOGLE_API_KEY") ??
throw new InconclusiveException("GOOGLE_API_KEY is not set"),
httpClient: new HttpClient());
var llm = new GeminiProModel(provider);

// Use OpenAI embeddings for now because Google doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
case ProviderType.Anthropic:
{
var provider = new AnthropicProvider(
apiKey: Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ??
throw new InconclusiveException("ANTHROPIC_API_KEY is not set"));
var llm = new Claude35Sonnet(provider);

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
{
var provider = new AnthropicProvider(
apiKey: Environment.GetEnvironmentVariable("ANTHROPIC_API_KEY") ??
throw new InconclusiveException("ANTHROPIC_API_KEY is not set"));
var llm = new Claude35Sonnet(provider);

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
case ProviderType.Groq:
{
var config = new GroqConfiguration()
{
ApiKey = Environment.GetEnvironmentVariable("GROQ_API_KEY") ??
throw new InconclusiveException("GROQ_API_KEY is not set.")
};
var config = new GroqConfiguration()
{
ApiKey = Environment.GetEnvironmentVariable("GROQ_API_KEY") ??
throw new InconclusiveException("GROQ_API_KEY is not set.")
};

var provider = new GroqProvider(config);
var llm = new Llama370B(provider);
var provider = new GroqProvider(config);
var llm = new Llama370B(provider);

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));
// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
return (llm, embeddings);
}
case ProviderType.DeepSeek:
{
var apiKey =
Environment.GetEnvironmentVariable("DEEPSEEK_API_KEY", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("DEEPSEEK_API_KEY is not set");
var llm = new DeepSeekCoderModel(new DeepSeekProvider(apiKey));

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
{
var apiKey =
Environment.GetEnvironmentVariable("DEEPSEEK_API_KEY", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("DEEPSEEK_API_KEY is not set");
var llm = new DeepSeekCoderModel(new DeepSeekProvider(apiKey));

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
case ProviderType.Azure:
{
var apiKey =
Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_API_KEY is not set");
var apiEndpoint =
Environment.GetEnvironmentVariable("AZURE_OPENAI_API_ENDPOINT", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_API_ENDPOINT is not set");
var deploymentName =
Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_DEPLOYMENT_NAME is not set");

var configuration = new AzureOpenAiConfiguration
{
Id = deploymentName,
ApiKey = apiKey,
Endpoint = apiEndpoint,
};
var provider = new AzureOpenAiProvider(configuration);
var llm = new AzureOpenAiChatModel(provider, deploymentName);

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}
var apiKey =
Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_API_KEY is not set");
var apiEndpoint =
Environment.GetEnvironmentVariable("AZURE_OPENAI_API_ENDPOINT", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_API_ENDPOINT is not set");
var deploymentName =
Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME", EnvironmentVariableTarget.User) ??
throw new InvalidOperationException("AZURE_OPENAI_DEPLOYMENT_NAME is not set");

var configuration = new AzureOpenAiConfiguration
{
Id = deploymentName,
ApiKey = apiKey,
Endpoint = apiEndpoint,
};
var provider = new AzureOpenAiProvider(configuration);
var llm = new AzureOpenAiChatModel(provider, deploymentName);

// Use OpenAI embeddings for now because Anthropic doesn't have embeddings yet
var embeddings = new TextEmbeddingV3SmallModel(
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InconclusiveException("OPENAI_API_KEY is not set"));

return (llm, embeddings);
}

default:
throw new ArgumentOutOfRangeException();
2 changes: 1 addition & 1 deletion src/Meta/test/OpenAiTests.cs
@@ -20,7 +20,7 @@ public void CountTokens()
new Gpt4OmniMiniModel("sk-random").CountTokens(text).Should().Be(4300);
new Gpt4Model("sk-random").CountTokens(text).Should().Be(4300);
}

[Test]
public async Task TestAudio()
{
