diff --git a/src/Model/Language/Llama.php b/src/Model/Language/Llama.php index cd04186a..d5ba4288 100644 --- a/src/Model/Language/Llama.php +++ b/src/Model/Language/Llama.php @@ -4,8 +4,14 @@ namespace PhpLlm\LlmChain\Model\Language; +use PhpLlm\LlmChain\Exception\RuntimeException; use PhpLlm\LlmChain\LanguageModel; +use PhpLlm\LlmChain\Message\AssistantMessage; +use PhpLlm\LlmChain\Message\Content\Image; +use PhpLlm\LlmChain\Message\Content\Text; use PhpLlm\LlmChain\Message\MessageBag; +use PhpLlm\LlmChain\Message\SystemMessage; +use PhpLlm\LlmChain\Message\UserMessage; use PhpLlm\LlmChain\Platform\Ollama; use PhpLlm\LlmChain\Platform\Replicate; use PhpLlm\LlmChain\Response\TextResponse; @@ -23,13 +29,94 @@ public function call(MessageBag $messages, array $options = []): TextResponse $endpoint = $this->platform instanceof Replicate ? 'predictions' : 'chat'; $response = $this->platform->request('meta/meta-llama-3.1-405b-instruct', $endpoint, [ - 'system' => $systemMessage?->content, - 'prompt' => $messages->withoutSystemMessage()->getIterator()->current()->content[0]->text, // @phpstan-ignore-line TODO: Multiple messages + 'system' => self::convertMessage($systemMessage ?? 
new SystemMessage('')),
+            'prompt' => self::convertToPrompt($messages->withoutSystemMessage()),
         ]);
 
         return new TextResponse(implode('', $response['output']));
     }
 
+    /**
+     * @todo make method private, just for testing, or create a MessageBag to LLama convert class :thinking:
+     */
+    public static function convertToPrompt(MessageBag $messageBag): string
+    {
+        $messages = [];
+
+        /** @var UserMessage|SystemMessage|AssistantMessage $message */
+        foreach ($messageBag->getIterator() as $message) {
+            $messages[] = self::convertMessage($message);
+        }
+
+        $messages = array_filter($messages, fn ($message) => '' !== $message);
+
+        return trim(implode(PHP_EOL.PHP_EOL, $messages)).PHP_EOL.PHP_EOL.'<|start_header_id|>assistant<|end_header_id|>';
+    }
+
+    /**
+     * @todo make method private, just for testing
+     */
+    public static function convertMessage(UserMessage|SystemMessage|AssistantMessage $message): string
+    {
+        if ($message instanceof SystemMessage) {
+            return trim(<<<SYSTEM
+<|start_header_id|>system<|end_header_id|>
+
+{$message->content}<|eot_id|>
+SYSTEM);
+        }
+
+        if ($message instanceof AssistantMessage) {
+            if ('' === $message->content || null === $message->content) {
+                return '';
+            }
+
+            return trim(<<<ASSISTANT
+<|start_header_id|>{$message->getRole()->value}<|end_header_id|>
+
+{$message->content}<|eot_id|>
+ASSISTANT);
+        }
+
+        if ($message instanceof UserMessage) {
+            $count = count($message->content);
+
+            $contentParts = [];
+            if ($count > 1) {
+                foreach ($message->content as $value) {
+                    if ($value instanceof Text) {
+                        $contentParts[] = $value->text;
+                    }
+
+                    if ($value instanceof Image) {
+                        $contentParts[] = $value->url;
+                    }
+                }
+            } elseif (1 === $count) {
+                $value = $message->content[0];
+                if ($value instanceof Text) {
+                    $contentParts[] = $value->text;
+                }
+
+                if ($value instanceof Image) {
+                    $contentParts[] = $value->url;
+                }
+            } else {
+                throw new RuntimeException('Unsupported message type.');
+            }
+
+            $content = implode(PHP_EOL, $contentParts);
+
+            return 
trim(<<<USER
+<|start_header_id|>{$message->getRole()->value}<|end_header_id|>
+
+{$content}<|eot_id|>
+USER);
+        }
+
+        throw new RuntimeException('Unsupported message type.'); // @phpstan-ignore-line
+    }
+
     public function supportsToolCalling(): bool
     {
         return false; // it does, but implementation here is still open.
diff --git a/tests/Model/Language/LlamaTest.php b/tests/Model/Language/LlamaTest.php
new file mode 100644
index 00000000..94ff87fa
--- /dev/null
+++ b/tests/Model/Language/LlamaTest.php
@@ -0,0 +1,133 @@
+append($message[1]);
+        }
+
+        self::assertSame(<<<EXPECTED
+<|start_header_id|>system<|end_header_id|>
+
+You are a helpful chatbot.<|eot_id|>
+
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how are you?<|eot_id|>
+
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how are you?
+What is your name?<|eot_id|>
+
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how are you?
+What is your name?
+https://example.com/image.jpg<|eot_id|>
+
+<|start_header_id|>assistant<|end_header_id|>
+
+I am an assistant.<|eot_id|>
+
+<|start_header_id|>assistant<|end_header_id|>
+EXPECTED,
+            (new Llama(new Ollama(new MockHttpClient(), 'http://example.com/')))->convertToPrompt($messageBag)
+        );
+    }
+
+    #[Test]
+    #[DataProvider('provideMessages')]
+    public function convertMessage(string $expected, UserMessage|SystemMessage|AssistantMessage $message): void
+    {
+        self::assertSame(
+            $expected,
+            (new Llama(new Ollama(new MockHttpClient(), 'http://example.com/')))->convertMessage($message)
+        );
+    }
+
+    /**
+     * @return iterable
+     */
+    public static function provideMessages(): iterable
+    {
+        yield 'System message' => [
+            <<<SYSTEM
+<|start_header_id|>system<|end_header_id|>
+
+You are a helpful chatbot.<|eot_id|>
+SYSTEM,
+            Message::forSystem('You are a helpful chatbot.'),
+        ];
+
+        yield 'UserMessage' => [
+            <<<USER
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how are you?<|eot_id|>
+USER,
+            Message::ofUser('Hello, how are you?'),
+        ];
+
+        yield 'UserMessage with two texts' => [
+            <<<USER
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how 
are you?
+What is your name?<|eot_id|>
+USER,
+            Message::ofUser('Hello, how are you?', 'What is your name?'),
+        ];
+
+        yield 'UserMessage with two texts and one image' => [
+            <<<USER
+<|start_header_id|>user<|end_header_id|>
+
+Hello, how are you?
+What is your name?
+https://example.com/image.jpg<|eot_id|>
+USER,
+            Message::ofUser('Hello, how are you?', 'What is your name?', new Image('https://example.com/image.jpg')),
+        ];
+
+        yield 'AssistantMessage' => [
+            <<<ASSISTANT
+<|start_header_id|>assistant<|end_header_id|>
+
+I am an assistant.<|eot_id|>
+ASSISTANT,
+            new AssistantMessage('I am an assistant.'),
+        ];
+
+        yield 'AssistantMessage with null content' => [
+            '',
+            new AssistantMessage(),
+        ];
+    }
+}