Skip to content

Commit

Permalink
Modify the ChatModelListenerIT#should_listen_error test case.
Browse files Browse the repository at this point in the history
  • Loading branch information
lixw committed Dec 3, 2024
1 parent e59ed1a commit 03656c2
Show file tree
Hide file tree
Showing 2 changed files with 77 additions and 74 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -9,37 +9,38 @@

class XinferenceChatModelListenerIT extends ChatModelListenerIT {

    /**
     * Creates the model under test, wired with the given listener so the base
     * IT can observe request/response events.
     */
    @Override
    protected ChatLanguageModel createModel(ChatModelListener listener) {
        return XinferenceChatModel.builder()
                .baseUrl(AbstractInferenceChatModelInfrastructure.baseUrl())
                .apiKey(AbstractInferenceChatModelInfrastructure.apiKey())
                .modelName(modelName())
                .temperature(temperature())
                .topP(topP())
                .maxTokens(maxTokens())
                .logRequests(true)
                .logResponses(true)
                .listeners(singletonList(listener))
                .build();
    }

    @Override
    protected String modelName() {
        return AbstractInferenceChatModelInfrastructure.modelName();
    }

    /**
     * Creates a model expected to fail, for the {@code should_listen_error} test.
     * Uses the real base URL but a hard-coded model name ("llama3.1") —
     * presumably one the test server does not serve, so the call errors out and
     * the listener's error callback fires. NOTE(review): confirm "llama3.1" is
     * absent from the test infrastructure; maxRetries(1) keeps the failure fast.
     */
    @Override
    protected ChatLanguageModel createFailingModel(ChatModelListener listener) {
        return XinferenceChatModel.builder()
                .baseUrl(AbstractInferenceChatModelInfrastructure.baseUrl())
                .modelName("llama3.1")
                .maxRetries(1)
                .listeners(singletonList(listener))
                .build();
    }

    /** Exception type the base IT asserts is reported to the listener on failure. */
    @Override
    protected Class<? extends Exception> expectedExceptionClass() {
        return XinferenceHttpException.class;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,48 +9,50 @@

class XinferenceStreamingChatModelListenerIT extends StreamingChatModelListenerIT {

    /**
     * Creates the streaming model under test, wired with the given listener so
     * the base IT can observe request/response events.
     */
    @Override
    protected StreamingChatLanguageModel createModel(ChatModelListener listener) {
        return XinferenceStreamingChatModel.builder()
                .baseUrl(AbstractInferenceChatModelInfrastructure.baseUrl())
                .apiKey(AbstractInferenceChatModelInfrastructure.apiKey())
                .modelName(modelName())
                .temperature(temperature())
                .topP(topP())
                .maxTokens(maxTokens())
                .logRequests(true)
                .logResponses(true)
                .listeners(singletonList(listener))
                .build();
    }

    /**
     * Streaming support for tool calls is available only when using Qwen models
     * with vLLM backend or GLM4-chat models without vLLM backend, so tool tests
     * are disabled here.
     */
    @Override
    protected boolean supportsTools() {
        return false;
    }

    @Override
    protected String modelName() {
        return AbstractInferenceChatModelInfrastructure.modelName();
    }

    /**
     * Creates a streaming model expected to fail, for the error-listening test.
     * Uses the real base URL but a hard-coded model name ("llama3.1") —
     * presumably one the test server does not serve, so the call errors out and
     * the listener's error callback fires. NOTE(review): confirm "llama3.1" is
     * absent from the test infrastructure.
     */
    @Override
    protected StreamingChatLanguageModel createFailingModel(ChatModelListener listener) {
        return XinferenceStreamingChatModel.builder()
                .baseUrl(AbstractInferenceChatModelInfrastructure.baseUrl())
                .modelName("llama3.1")
                .logRequests(true)
                .logResponses(true)
                .listeners(singletonList(listener))
                .build();
    }

    /** Exception type the base IT asserts is reported to the listener on failure. */
    @Override
    protected Class<? extends Exception> expectedExceptionClass() {
        return XinferenceHttpException.class;
    }
}

0 comments on commit 03656c2

Please sign in to comment.