From d829bca39c4f59db68b7d87ba88bf7c2d761019c Mon Sep 17 00:00:00 2001
From: Mateusz Charytoniuk <mateusz.charytoniuk@protonmail.com>
Date: Thu, 4 Apr 2024 23:36:32 +0200
Subject: [PATCH] chore: extract string prompt upgrade

---
 config.ini.example                            |  1 +
 docs/pages/index.md                           |  5 +--
 src/BackusNaurFormGrammar/InlineGrammar.php   | 22 +++++++++++
 .../SubjectActionGrammar.php                  |  2 +-
 src/LlamaCppClient.php                        | 11 +++---
 src/LlamaCppClientTest.php                    | 29 +++++++++++++-
 src/LlamaCppCompletionRequest.php             | 20 +++-------
 src/LlamaCppConfiguration.php                 |  1 +
 src/LlamaCppExtractString.php                 | 23 ++++++-----
 src/LlmChatHistory.php                        | 15 +++++++
 src/LlmChatHistoryRenderer.php                | 27 +++++++++++++
 src/LlmChatMessage.php                        | 13 +++++++
 src/LlmChatMessageRenderer.php                |  7 ++++
 .../ChatMLMessageRenderer.php                 | 21 ++++++++++
 .../ChatMLMessageRendererTest.php             | 32 +++++++++++++++
 .../MistralInstructMessageRenderer.php        | 20 ++++++++++
 src/LlmChatMessageRendererInterface.php       | 10 +++++
 src/LlmChatTemplateType.php                   | 13 +++++++
 src/LlmPromptTemplate/HermesChat.php          | 25 ------------
 .../LlamaCppConfigurationProvider.php         |  4 ++
 .../LlmChatMessageRendererProvider.php        | 39 +++++++++++++++++++
 21 files changed, 280 insertions(+), 60 deletions(-)
 create mode 100644 src/BackusNaurFormGrammar/InlineGrammar.php
 create mode 100644 src/LlmChatHistory.php
 create mode 100644 src/LlmChatHistoryRenderer.php
 create mode 100644 src/LlmChatMessage.php
 create mode 100644 src/LlmChatMessageRenderer.php
 create mode 100644 src/LlmChatMessageRenderer/ChatMLMessageRenderer.php
 create mode 100644 src/LlmChatMessageRenderer/ChatMLMessageRendererTest.php
 create mode 100644 src/LlmChatMessageRenderer/MistralInstructMessageRenderer.php
 create mode 100644 src/LlmChatMessageRendererInterface.php
 create mode 100644 src/LlmChatTemplateType.php
 delete mode 100644 src/LlmPromptTemplate/HermesChat.php
 create mode 100644 src/SingletonProvider/LlmChatMessageRendererProvider.php

diff --git a/config.ini.example b/config.ini.example
index 6b294304..cfad77be 100644
--- a/config.ini.example
+++ b/config.ini.example
@@ -20,6 +20,7 @@ grpc_php_plugin_bin = %DM_ROOT%/grpc_php_plugin
 protoc_bin = /usr/bin/protoc
 
 [llamacpp]
+chat_template = mistral_instruct
 host = 127.0.0.1
 port = 8081
 
diff --git a/docs/pages/index.md b/docs/pages/index.md
index b165868b..78752c0d 100644
--- a/docs/pages/index.md
+++ b/docs/pages/index.md
@@ -17,14 +17,11 @@ description: >
     <div class="homepage__content">
         <hgroup class="homepage__title">
             <h1>Resonance</h1>
-            <h2>PHP Framework That Solves Real-Life Issues</h2>
+            <h2>Build Web Applications with AI and ML Capabilities</h2>
             <p>
                 Designed from the ground up to facilitate interoperability and 
                 messaging between services in your infrastructure and beyond.
             </p>
-            <p>
-                Provides AI capabilities.
-            </p>
             <p>
                 Takes full advantage of asynchronous PHP. Built on top of 
                 Swoole.
diff --git a/src/BackusNaurFormGrammar/InlineGrammar.php b/src/BackusNaurFormGrammar/InlineGrammar.php
new file mode 100644
index 00000000..74ee6793
--- /dev/null
+++ b/src/BackusNaurFormGrammar/InlineGrammar.php
@@ -0,0 +1,22 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance\BackusNaurFormGrammar;
+
+use Distantmagic\Resonance\BackusNaurFormGrammar;
+
+readonly class InlineGrammar extends BackusNaurFormGrammar
+{
+    public function __construct(
+        /**
+         * @var non-empty-string
+         */
+        private string $grammar
+    ) {}
+
+    public function getGrammarContent(): string
+    {
+        return $this->grammar;
+    }
+}
diff --git a/src/BackusNaurFormGrammar/SubjectActionGrammar.php b/src/BackusNaurFormGrammar/SubjectActionGrammar.php
index 414fe3af..3860d8cb 100644
--- a/src/BackusNaurFormGrammar/SubjectActionGrammar.php
+++ b/src/BackusNaurFormGrammar/SubjectActionGrammar.php
@@ -37,7 +37,7 @@ readonly class SubjectActionGrammar extends BackusNaurFormGrammar
         $stringGrammar = <<<'STRING_GRAMMAR'
         "\"" (
             [^"\\] |
-             "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
+             "\\" (["\\/bfnrt] | "u" [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F])
         )* "\""
         STRING_GRAMMAR;
 
diff --git a/src/LlamaCppClient.php b/src/LlamaCppClient.php
index 831878ab..c12be7f6 100644
--- a/src/LlamaCppClient.php
+++ b/src/LlamaCppClient.php
@@ -8,7 +8,6 @@ use CurlHandle;
 use Distantmagic\Resonance\Attribute\RequiresPhpExtension;
 use Distantmagic\Resonance\Attribute\Singleton;
 use Generator;
-use JsonSerializable;
 use Psr\Log\LoggerInterface;
 use RuntimeException;
 use Swoole\Coroutine;
@@ -20,6 +19,7 @@ readonly class LlamaCppClient implements LlamaCppClientInterface
 {
     public function __construct(
         private JsonSerializer $jsonSerializer,
+        private LlmChatHistoryRenderer $llmChatHistoryRenderer,
         private LlamaCppConfiguration $llamaCppConfiguration,
         private LlamaCppLinkBuilder $llamaCppLinkBuilder,
         private LoggerInterface $logger,
@@ -27,7 +27,8 @@ readonly class LlamaCppClient implements LlamaCppClientInterface
 
     public function generateCompletion(LlamaCppCompletionRequest $request): LlamaCppCompletionIterator
     {
-        $responseChunks = $this->streamResponse($request, '/completion');
+        $serializedRequest = $this->jsonSerializer->serialize($request->toJsonSerializable($this->llmChatHistoryRenderer));
+        $responseChunks = $this->streamResponse($serializedRequest, '/completion');
 
         return new LlamaCppCompletionIterator(
             $this->jsonSerializer,
@@ -75,7 +76,8 @@ readonly class LlamaCppClient implements LlamaCppClientInterface
      */
     public function generateInfill(LlamaCppInfillRequest $request): Generator
     {
-        $responseChunks = $this->streamResponse($request, '/infill');
+        $serializedRequest = $this->jsonSerializer->serialize($request);
+        $responseChunks = $this->streamResponse($serializedRequest, '/infill');
 
         foreach ($responseChunks as $responseChunk) {
             if ($responseChunk instanceof SwooleChannelIteratorError) {
@@ -166,10 +168,9 @@ readonly class LlamaCppClient implements LlamaCppClientInterface
     /**
      * @return SwooleChannelIterator<LlamaCppClientResponseChunk>
      */
-    private function streamResponse(JsonSerializable $request, string $path): SwooleChannelIterator
+    private function streamResponse(string $requestData, string $path): SwooleChannelIterator
     {
         $channel = new Channel(1);
-        $requestData = json_encode($request);
 
         SwooleCoroutineHelper::mustGo(function () use ($channel, $path, $requestData): void {
             $curlHandle = $this->createCurlHandle();
diff --git a/src/LlamaCppClientTest.php b/src/LlamaCppClientTest.php
index f9c6a767..5b5f697f 100644
--- a/src/LlamaCppClientTest.php
+++ b/src/LlamaCppClientTest.php
@@ -7,6 +7,7 @@ namespace Distantmagic\Resonance;
 use PHPUnit\Framework\Attributes\CoversClass;
 use PHPUnit\Framework\Attributes\Group;
 use PHPUnit\Framework\TestCase;
+use Swoole\Event;
 
 /**
  * @internal
@@ -17,7 +18,33 @@ final class LlamaCppClientTest extends TestCase
 {
     use TestsDependencyInectionContainerTrait;
 
-    public function test_request_header_is_parsed(): void
+    protected function tearDown(): void
+    {
+        Event::wait();
+    }
+
+    public function test_completion_is_generated(): void
+    {
+        $llamaCppClient = self::$container->make(LlamaCppClient::class);
+
+        SwooleCoroutineHelper::mustRun(static function () use ($llamaCppClient) {
+            $completion = $llamaCppClient->generateCompletion(new LlamaCppCompletionRequest(
+                llmChatHistory: new LlmChatHistory([
+                    new LlmChatMessage('user', 'Who are you? Answer in exactly two words.'),
+                ]),
+            ));
+
+            $ret = '';
+
+            foreach ($completion as $token) {
+                $ret .= (string) $token;
+            }
+
+            self::assertNotEmpty($ret);
+        });
+    }
+
+    public function test_health_status_is_checked(): void
     {
         $llamaCppClient = self::$container->make(LlamaCppClient::class);
 
diff --git a/src/LlamaCppCompletionRequest.php b/src/LlamaCppCompletionRequest.php
index 62fd4886..2604b6d6 100644
--- a/src/LlamaCppCompletionRequest.php
+++ b/src/LlamaCppCompletionRequest.php
@@ -4,23 +4,19 @@ declare(strict_types=1);
 
 namespace Distantmagic\Resonance;
 
-use JsonSerializable;
-
-readonly class LlamaCppCompletionRequest implements JsonSerializable
+readonly class LlamaCppCompletionRequest
 {
     public function __construct(
-        public LlmPromptTemplate $promptTemplate,
+        public LlmChatHistory $llmChatHistory,
         public ?BackusNaurFormGrammar $backusNaurFormGrammar = null,
-        public ?LlmPrompt $llmSystemPrompt = null,
     ) {}
 
-    public function jsonSerialize(): array
+    public function toJsonSerializable(LlmChatHistoryRenderer $llmChatHistoryRenderer): array
     {
         $parameters = [
             'cache_prompt' => true,
-            // 'n_predict' => 200,
-            'prompt' => $this->promptTemplate->getPromptTemplateContent(),
-            'stop' => $this->promptTemplate->getStopWords(),
+            'n_predict' => 128,
+            'prompt' => $llmChatHistoryRenderer->renderLlmChatHistory($this->llmChatHistory),
             'stream' => true,
         ];
 
@@ -28,12 +24,6 @@ readonly class LlamaCppCompletionRequest implements JsonSerializable
             $parameters['grammar'] = $this->backusNaurFormGrammar->getGrammarContent();
         }
 
-        if ($this->llmSystemPrompt) {
-            $parameters['system_prompt'] = [
-                'prompt' => $this->llmSystemPrompt->getPromptContent(),
-            ];
-        }
-
         return $parameters;
     }
 }
diff --git a/src/LlamaCppConfiguration.php b/src/LlamaCppConfiguration.php
index c1ca0bd2..926168b6 100644
--- a/src/LlamaCppConfiguration.php
+++ b/src/LlamaCppConfiguration.php
@@ -27,6 +27,7 @@ readonly class LlamaCppConfiguration
         #[SensitiveParameter]
         public string $host,
+        public LlmChatTemplateType $llmChatTemplate,
         #[SensitiveParameter]
         public int $port,
         #[SensitiveParameter]
         public string $scheme,
diff --git a/src/LlamaCppExtractString.php b/src/LlamaCppExtractString.php
index d76185ec..3d19c5bf 100644
--- a/src/LlamaCppExtractString.php
+++ b/src/LlamaCppExtractString.php
@@ -4,7 +4,7 @@ declare(strict_types=1);
 
 namespace Distantmagic\Resonance;
 
-use Distantmagic\Resonance\LlmPromptTemplate\MistralInstructChat;
+use Distantmagic\Resonance\BackusNaurFormGrammar\InlineGrammar;
 
 readonly class LlamaCppExtractString
 {
@@ -18,14 +18,19 @@ readonly class LlamaCppExtractString
     ): ?string {
         $completion = $this->llamaCppClient->generateCompletion(
             new LlamaCppCompletionRequest(
-                promptTemplate: new MistralInstructChat(<<<PROMPT
-                User is about to provide the $subject.
-                If user provides the $subject, repeat only that $subject, without any additional comment.
-                If user did not provide $subject or it is not certain, write the empty string: ""
-
-                User input:
-                $input
-                PROMPT),
+                backusNaurFormGrammar: new InlineGrammar('root ::= [0-9a-zA-Z\" ]+'),
+                llmChatHistory: new LlmChatHistory([
+                    new LlmChatMessage(
+                        actor: 'system',
+                        message: <<<PROMPT
+                        User is about to provide the $subject.
+                        If user provides the $subject, repeat only that $subject, without any additional comment.
+                        If user did not provide $subject or it is not certain, write the empty string: ""
+                        Respond only with provided $subject.
+                        PROMPT
+                    ),
+                    new LlmChatMessage('user', $input),
+                ]),
             ),
         );
 
diff --git a/src/LlmChatHistory.php b/src/LlmChatHistory.php
new file mode 100644
index 00000000..18d63547
--- /dev/null
+++ b/src/LlmChatHistory.php
@@ -0,0 +1,15 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+readonly class LlmChatHistory
+{
+    /**
+     * @param array<LlmChatMessage> $messages
+     */
+    public function __construct(
+        public array $messages,
+    ) {}
+}
diff --git a/src/LlmChatHistoryRenderer.php b/src/LlmChatHistoryRenderer.php
new file mode 100644
index 00000000..f8cf8c39
--- /dev/null
+++ b/src/LlmChatHistoryRenderer.php
@@ -0,0 +1,27 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+use Distantmagic\Resonance\Attribute\Singleton;
+
+#[Singleton]
+readonly class LlmChatHistoryRenderer
+{
+    public function __construct(
+        private LlmChatMessageRendererInterface $llmChatMessageRenderer,
+    ) {}
+
+    public function renderLlmChatHistory(
+        LlmChatHistory $llmChatHistory,
+    ): string {
+        $ret = '';
+
+        foreach ($llmChatHistory->messages as $message) {
+            $ret .= $this->llmChatMessageRenderer->renderLlmChatMessage($message)."\n";
+        }
+
+        return $ret;
+    }
+}
diff --git a/src/LlmChatMessage.php b/src/LlmChatMessage.php
new file mode 100644
index 00000000..68181ef2
--- /dev/null
+++ b/src/LlmChatMessage.php
@@ -0,0 +1,13 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+readonly class LlmChatMessage
+{
+    public function __construct(
+        public string $actor,
+        public string $message,
+    ) {}
+}
diff --git a/src/LlmChatMessageRenderer.php b/src/LlmChatMessageRenderer.php
new file mode 100644
index 00000000..e833e609
--- /dev/null
+++ b/src/LlmChatMessageRenderer.php
@@ -0,0 +1,7 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+abstract readonly class LlmChatMessageRenderer implements LlmChatMessageRendererInterface {}
diff --git a/src/LlmChatMessageRenderer/ChatMLMessageRenderer.php b/src/LlmChatMessageRenderer/ChatMLMessageRenderer.php
new file mode 100644
index 00000000..2730bf48
--- /dev/null
+++ b/src/LlmChatMessageRenderer/ChatMLMessageRenderer.php
@@ -0,0 +1,21 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance\LlmChatMessageRenderer;
+
+use Distantmagic\Resonance\Attribute\Singleton;
+use Distantmagic\Resonance\LlmChatMessage;
+use Distantmagic\Resonance\LlmChatMessageRenderer;
+
+#[Singleton]
+readonly class ChatMLMessageRenderer extends LlmChatMessageRenderer
+{
+    public function renderLlmChatMessage(LlmChatMessage $llmChatMessage): string
+    {
+        return <<<FORMATTED
+        <|im_start|>{$llmChatMessage->actor}
+        {$llmChatMessage->message}<|im_end|>
+        FORMATTED;
+    }
+}
diff --git a/src/LlmChatMessageRenderer/ChatMLMessageRendererTest.php b/src/LlmChatMessageRenderer/ChatMLMessageRendererTest.php
new file mode 100644
index 00000000..8877bdb1
--- /dev/null
+++ b/src/LlmChatMessageRenderer/ChatMLMessageRendererTest.php
@@ -0,0 +1,32 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance\LlmChatMessageRenderer;
+
+use Distantmagic\Resonance\LlmChatMessage;
+use PHPUnit\Framework\Attributes\CoversClass;
+use PHPUnit\Framework\TestCase;
+
+/**
+ * @internal
+ */
+#[CoversClass(ChatMLMessageRenderer::class)]
+final class ChatMLMessageRendererTest extends TestCase
+{
+    public function test_chatml_message_is_rendered(): void
+    {
+        $chatMessageRenderer = new ChatMLMessageRenderer();
+
+        self::assertSame(
+            <<<'EXPECTED_MESSAGE'
+            <|im_start|>system
+            How can I help?<|im_end|>
+            EXPECTED_MESSAGE,
+            $chatMessageRenderer->renderLlmChatMessage(new LlmChatMessage(
+                actor: 'system',
+                message: 'How can I help?',
+            ))
+        );
+    }
+}
diff --git a/src/LlmChatMessageRenderer/MistralInstructMessageRenderer.php b/src/LlmChatMessageRenderer/MistralInstructMessageRenderer.php
new file mode 100644
index 00000000..327f7863
--- /dev/null
+++ b/src/LlmChatMessageRenderer/MistralInstructMessageRenderer.php
@@ -0,0 +1,20 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance\LlmChatMessageRenderer;
+
+use Distantmagic\Resonance\Attribute\Singleton;
+use Distantmagic\Resonance\LlmChatMessage;
+use Distantmagic\Resonance\LlmChatMessageRenderer;
+
+#[Singleton]
+readonly class MistralInstructMessageRenderer extends LlmChatMessageRenderer
+{
+    public function renderLlmChatMessage(LlmChatMessage $llmChatMessage): string
+    {
+        return <<<FORMATTED
+        [INST]{$llmChatMessage->message}[/INST]
+        FORMATTED;
+    }
+}
diff --git a/src/LlmChatMessageRendererInterface.php b/src/LlmChatMessageRendererInterface.php
new file mode 100644
index 00000000..f5c83fdd
--- /dev/null
+++ b/src/LlmChatMessageRendererInterface.php
@@ -0,0 +1,10 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+interface LlmChatMessageRendererInterface
+{
+    public function renderLlmChatMessage(LlmChatMessage $llmChatMessage): string;
+}
diff --git a/src/LlmChatTemplateType.php b/src/LlmChatTemplateType.php
new file mode 100644
index 00000000..a78bf2b9
--- /dev/null
+++ b/src/LlmChatTemplateType.php
@@ -0,0 +1,13 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance;
+
+enum LlmChatTemplateType: string
+{
+    use EnumValuesTrait;
+
+    case ChatML = 'chatml';
+    case MistralInstruct = 'mistral_instruct';
+}
diff --git a/src/LlmPromptTemplate/HermesChat.php b/src/LlmPromptTemplate/HermesChat.php
deleted file mode 100644
index facaaba7..00000000
--- a/src/LlmPromptTemplate/HermesChat.php
+++ /dev/null
@@ -1,25 +0,0 @@
-<?php
-
-declare(strict_types=1);
-
-namespace Distantmagic\Resonance\LlmPromptTemplate;
-
-use Distantmagic\Resonance\LlmPromptTemplate;
-
-readonly class HermesChat extends LlmPromptTemplate
-{
-    public function __construct(private string $prompt) {}
-
-    public function getPromptTemplateContent(): string
-    {
-        return sprintf(
-            '<|im_start|%s<|im_end|>',
-            $this->prompt,
-        );
-    }
-
-    public function getStopWords(): array
-    {
-        return ['<|im_start|>', '<|im_end|>'];
-    }
-}
diff --git a/src/SingletonProvider/ConfigurationProvider/LlamaCppConfigurationProvider.php b/src/SingletonProvider/ConfigurationProvider/LlamaCppConfigurationProvider.php
index f78f44c1..db5bdeef 100644
--- a/src/SingletonProvider/ConfigurationProvider/LlamaCppConfigurationProvider.php
+++ b/src/SingletonProvider/ConfigurationProvider/LlamaCppConfigurationProvider.php
@@ -12,11 +12,13 @@ use Distantmagic\Resonance\Constraint\NumberConstraint;
 use Distantmagic\Resonance\Constraint\ObjectConstraint;
 use Distantmagic\Resonance\Constraint\StringConstraint;
 use Distantmagic\Resonance\LlamaCppConfiguration;
+use Distantmagic\Resonance\LlmChatTemplateType;
 use Distantmagic\Resonance\SingletonProvider\ConfigurationProvider;
 
 /**
  * @template-extends ConfigurationProvider<LlamaCppConfiguration, array{
  *     api_key: null|non-empty-string,
+ *     chat_template: non-empty-string,
  *     completion_token_timeout: float,
  *     host: non-empty-string,
  *     port: int,
@@ -31,6 +33,7 @@ final readonly class LlamaCppConfigurationProvider extends ConfigurationProvider
         return new ObjectConstraint(
             properties: [
                 'api_key' => (new StringConstraint())->default(null),
+                'chat_template' => new EnumConstraint(LlmChatTemplateType::values()),
                 'completion_token_timeout' => (new NumberConstraint())->default(1.0),
                 'host' => new StringConstraint(),
                 'port' => new IntegerConstraint(),
@@ -51,6 +54,7 @@ final readonly class LlamaCppConfigurationProvider extends ConfigurationProvider
             completionTokenTimeout: $validatedData['completion_token_timeout'],
             host: $validatedData['host'],
             port: $validatedData['port'],
+            llmChatTemplate: LlmChatTemplateType::from($validatedData['chat_template']),
             scheme: $validatedData['scheme'],
         );
     }
diff --git a/src/SingletonProvider/LlmChatMessageRendererProvider.php b/src/SingletonProvider/LlmChatMessageRendererProvider.php
new file mode 100644
index 00000000..6e34fa97
--- /dev/null
+++ b/src/SingletonProvider/LlmChatMessageRendererProvider.php
@@ -0,0 +1,39 @@
+<?php
+
+declare(strict_types=1);
+
+namespace Distantmagic\Resonance\SingletonProvider;
+
+use Distantmagic\Resonance\Attribute\Singleton;
+use Distantmagic\Resonance\LlamaCppConfiguration;
+use Distantmagic\Resonance\LlmChatMessageRenderer\ChatMLMessageRenderer;
+use Distantmagic\Resonance\LlmChatMessageRenderer\MistralInstructMessageRenderer;
+use Distantmagic\Resonance\LlmChatMessageRendererInterface;
+use Distantmagic\Resonance\LlmChatTemplateType;
+use Distantmagic\Resonance\PHPProjectFiles;
+use Distantmagic\Resonance\SingletonContainer;
+use Distantmagic\Resonance\SingletonProvider;
+use Nette\PhpGenerator\Printer;
+use RuntimeException;
+
+/**
+ * @template-extends SingletonProvider<LlmChatMessageRendererInterface>
+ */
+#[Singleton(provides: LlmChatMessageRendererInterface::class)]
+final readonly class LlmChatMessageRendererProvider extends SingletonProvider
+{
+    public function __construct(
+        private ChatMLMessageRenderer $chatMLMessageRenderer,
+        private LlamaCppConfiguration $llamaCppConfiguration,
+        private MistralInstructMessageRenderer $mistralInstructMessageRenderer,
+    ) {}
+
+    public function provide(SingletonContainer $singletons, PHPProjectFiles $phpProjectFiles): LlmChatMessageRendererInterface
+    {
+        return match ($this->llamaCppConfiguration->llmChatTemplate) {
+            LlmChatTemplateType::ChatML => $this->chatMLMessageRenderer,
+            LlmChatTemplateType::MistralInstruct => $this->mistralInstructMessageRenderer,
+            default => throw new RuntimeException('Unsupported llm chat message format: '.$this->llamaCppConfiguration->llmChatTemplate->value),
+        };
+    }
+}
-- 
GitLab