From 3302b5071001cf392b0158a46390ddb77bd2aa54 Mon Sep 17 00:00:00 2001
From: Daria Fokina
Date: Tue, 3 Oct 2023 10:44:58 +0200
Subject: [PATCH] doc cleanup (whitespace)

---
 .../instructor_embedders/instructor_document_embedder.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/components/instructor-embedders/instructor_embedders/instructor_document_embedder.py b/components/instructor-embedders/instructor_embedders/instructor_document_embedder.py
index 3724ef9c4..5981b80f2 100644
--- a/components/instructor-embedders/instructor_embedders/instructor_document_embedder.py
+++ b/components/instructor-embedders/instructor_embedders/instructor_document_embedder.py
@@ -39,8 +39,7 @@ def __init__(
             "Represent the 'domain' 'text_type' for 'task_objective'", where:
             - "domain" is optional, and it specifies the domain of the text, e.g., science, finance, medicine, etc.
             - "text_type" is required, and it specifies the encoding unit, e.g., sentence, document, paragraph, etc.
-            - "task_objective" is optional, and it specifies the objective of embedding, e.g., retrieve a document,
-              classify the sentence, etc.
+            - "task_objective" is optional, and it specifies the objective of embedding, e.g., retrieve a document, classify the sentence, etc.
             Check some examples of instructions here: https://github.com/xlang-ai/instructor-embedding#use-cases
         :param batch_size: Number of strings to encode at once.
         :param progress_bar: If true, displays progress bar during embedding.
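
Note (not part of the patch): the docstring touched above describes the INSTRUCTOR prompt template. The minimal sketch below shows one way such an instruction might be passed to the embedder. The import path is inferred from the file path in the diff; model_name_or_path, warm_up(), and run() are assumptions based on typical Haystack 2.x embedders, not something this patch defines.

# Illustrative sketch only -- interface details are assumed, see note above.
from instructor_embedders.instructor_document_embedder import InstructorDocumentEmbedder

# Instruction built from the documented template:
#   "Represent the 'domain' 'text_type' for 'task_objective'"
embedder = InstructorDocumentEmbedder(
    model_name_or_path="hkunlp/instructor-base",  # assumed model name
    instruction="Represent the Medicine document for retrieval",
    batch_size=32,
    progress_bar=True,
)
embedder.warm_up()                       # assumed: loads the underlying INSTRUCTOR model
# result = embedder.run(documents=docs)  # assumed: returns documents enriched with embeddings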