Allow prompts without an instruction block
s-jse committed Oct 24, 2024
1 parent b5867b1 commit c4adcfb
Showing 2 changed files with 22 additions and 4 deletions.
chainlite/load_prompt.py: 9 additions & 4 deletions

@@ -139,8 +139,8 @@ def _split_prompt_to_blocks(prompt: str) -> List[Tuple[str, str]]:
 
     # check the prompt format is correct
     assert (
-        len([b for b in block_indices if b[1] == "instruction"]) == 1
-    ), "Prompts should contain exactly one instruction block"
+        len([b for b in block_indices if b[1] == "instruction"]) <= 1
+    ), "Prompts should contain at most one instruction block"
     num_distillation_instruction = len(
         [b for b in block_indices if b[1] == "distillation_instruction"]
     )
@@ -166,12 +166,12 @@ def _split_prompt_to_blocks(prompt: str) -> List[Tuple[str, str]]:
     block_indices_with_end = block_indices + [(len(prompt), "end", "end")]
     blocks = []
     for i in range(len(block_indices)):
-        block_string = prompt[
+        block_content = prompt[
             block_indices_with_end[i][0]
             + len(block_indices_with_end[i][2]) : block_indices_with_end[i + 1][0]
         ].strip()
 
-        blocks.append((block_indices_with_end[i][1], block_string))
+        blocks.append((block_indices_with_end[i][1], block_content))
 
     return blocks
@@ -181,6 +181,11 @@ def _prompt_blocks_to_chat_messages(
 ) -> Tuple[ChatPromptTemplate, str | None]:
     message_prompt_templates = []
     distillation_instruction = None
+
+    # Add an instruction block if it is not present
+    if len([b for b in blocks if b[0] == "instruction"]) == 0:
+        blocks = [("instruction", "")] + blocks
+
     if is_distilled:
         assert "distillation_instruction" in [
             b[0] for b in blocks
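
Note: the new branch in _prompt_blocks_to_chat_messages is self-contained enough to illustrate on its own. Below is a minimal sketch of the normalization, assuming blocks is a list of (block_type, content) tuples as produced by _split_prompt_to_blocks; the sample block values are illustrative, not from the repository.

# blocks as produced by _split_prompt_to_blocks: (block_type, content) tuples
blocks = [
    ("input", "what is X?"),
    ("output", "The value of X is one"),
]

# Prepend an empty instruction block if none is present, so downstream code
# can keep assuming the conversation starts with an instruction message.
if len([b for b in blocks if b[0] == "instruction"]) == 0:
    blocks = [("instruction", "")] + blocks

assert blocks[0] == ("instruction", "")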
tests/test_llm_generate.py: 13 additions & 0 deletions

@@ -74,6 +74,19 @@ async def test_string_prompts():
         temperature=0,
     ).ainvoke({"variable": "Y"})
     assert "The value of Y is six" in response
+
+    # Without instruction block
+    response = await llm_generation_chain(
+        template_file="",
+        template_blocks=[
+            ("input", "what is X?"),
+            ("output", "The value of X is one"),
+            ("input", "what is {{ variable }}?"),
+        ],
+        engine=test_engine,
+        max_tokens=10,
+        temperature=0,
+    ).ainvoke({"variable": "Y"})
     write_prompt_logs_to_file("tests/llm_input_outputs.jsonl")


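For reference, this is what the newly allowed usage looks like outside the test suite. A minimal sketch mirroring the new test case: the import path and engine name are assumptions, while template_file, template_blocks, and write_prompt_logs_to_file follow the call in the diff above.

import asyncio

# Assumed top-level import path for these helpers.
from chainlite import llm_generation_chain, write_prompt_logs_to_file

async def main():
    # A chain built only from input/output blocks; before this commit,
    # loading it raised "Prompts should contain exactly one instruction block".
    chain = llm_generation_chain(
        template_file="",
        template_blocks=[
            ("input", "what is X?"),
            ("output", "The value of X is one"),
            ("input", "what is {{ variable }}?"),
        ],
        engine="gpt-35-turbo",  # placeholder engine name
        max_tokens=10,
        temperature=0,
    )
    print(await chain.ainvoke({"variable": "Y"}))
    write_prompt_logs_to_file("tests/llm_input_outputs.jsonl")

asyncio.run(main())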
