diff --git a/examples/groq/chat.php b/examples/groq/chat.php
new file mode 100644
index 00000000..1f16cd14
--- /dev/null
+++ b/examples/groq/chat.php
@@ -0,0 +1,36 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+use Symfony\AI\Agent\Agent;
+use Symfony\AI\Platform\Bridge\Groq\Llama;
+use Symfony\AI\Platform\Bridge\Groq\PlatformFactory;
+use Symfony\AI\Platform\Message\Message;
+use Symfony\AI\Platform\Message\MessageBag;
+use Symfony\Component\Dotenv\Dotenv;
+
+require_once dirname(__DIR__, 2).'/vendor/autoload.php';
+(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');
+
+$platform = PlatformFactory::create($_SERVER['GROQ_API_KEY']);
+$model = new Llama(Llama::LLAMA3_70B, [
+    'temperature' => 0.5, // default options for the model
+]);
+
+$agent = new Agent($platform, $model);
+$messages = new MessageBag(
+    Message::forSystem('You are a pirate and you write funny.'),
+    Message::ofUser('What is the Symfony framework?'),
+);
+$response = $agent->call($messages, [
+    'max_tokens' => 500, // specific options just for this call
+]);
+
+echo $response->getContent().\PHP_EOL;
diff --git a/src/platform/doc/index.rst b/src/platform/doc/index.rst
index d13ea15f..564ba3e7 100644
--- a/src/platform/doc/index.rst
+++ b/src/platform/doc/index.rst
@@ -72,8 +72,16 @@ usually defined by the specific models and their documentation.
 * **Language Models**
   * `OpenAI's GPT`_ with `OpenAI`_ and `Azure`_ as Platform
   * `Anthropic's Claude`_ with `Anthropic`_ and `AWS Bedrock`_ as Platform
-  * `Meta's Llama`_ with `Azure`_, `Ollama`_, `Replicate`_ and `AWS Bedrock`_ as Platform
+  * `Meta's Llama`_ with `Azure`_, `Ollama`_, `Replicate`_, `AWS Bedrock`_ and `Groq`_ as Platform
+  * `Google's Gemma`_ with `Groq`_ as Platform
+  * `Mistral's Saba`_ with `Groq`_ as Platform
+  * `Alibaba's Qwen`_ with `Groq`_ as Platform
+  * `Groq's Llama`_ with `Groq`_ as Platform
+
+  .. note::
+
+     While Groq serves several third-party models, specific API requirements or limitations may apply to each one. Refer to Groq's official documentation for details.
   * `Google's Gemini`_ with `Google`_ and `OpenRouter`_ as Platform
   * `DeepSeek's R1`_ with `OpenRouter`_ as Platform
   * `Amazon's Nova`_ with `AWS Bedrock`_ as Platform
   * `Mistral's Mistral`_ with `Mistral`_ as Platform
@@ -304,6 +312,11 @@ which can be useful to speed up the processing::
 .. _`Google's Gemini`: https://gemini.google.com/
 .. _`Google`: https://ai.google.dev/
 .. _`OpenRouter`: https://www.openrouter.com/
+.. _`Groq's Llama`: https://console.groq.com/docs/overview
+.. _`Groq`: https://groq.com/
+.. _`Google's Gemma`: https://ai.google.dev/models/gemma
+.. _`Mistral's Saba`: https://mistral.ai/news/mistral-saba
+.. _`Alibaba's Qwen`: https://qwen.ai
 .. _`DeepSeek's R1`: https://www.deepseek.com/
 .. _`Amazon's Nova`: https://nova.amazon.com
 .. _`Mistral's Mistral`: https://www.mistral.ai/
diff --git a/src/platform/src/Bridge/Groq/Llama.php b/src/platform/src/Bridge/Groq/Llama.php
new file mode 100644
index 00000000..58519955
--- /dev/null
+++ b/src/platform/src/Bridge/Groq/Llama.php
@@ -0,0 +1,46 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Bridge\Groq;
+
+use Symfony\AI\Platform\Capability;
+use Symfony\AI\Platform\Model;
+
+/**
+ * @author Christopher Hertel
+ * @author Dave Hulbert
+ */
+class Llama extends Model
+{
+    public const LLAMA3_8B = 'llama3-8b-8192';
+    public const LLAMA3_70B = 'llama3-70b-8192';
+    public const LLAMA2_70B = 'llama2-70b-4096';
+    public const MISTRAL_SABA_24B = 'mistral-saba-24b';
+    public const GEMMA2_9B = 'gemma2-9b-it';
+    public const QWEN3_32B = 'qwen/qwen3-32b';
+
+    /**
+     * @param array<string, mixed> $options The default options for the model usage
+     */
+    public function __construct(
+        string $name = self::LLAMA3_70B,
+        array $options = ['temperature' => 1.0],
+    ) {
+        $capabilities = [
+            Capability::INPUT_MESSAGES,
+            Capability::OUTPUT_TEXT,
+            Capability::OUTPUT_STREAMING,
+            Capability::TOOL_CALLING,
+        ];
+
+        parent::__construct($name, $capabilities, $options);
+    }
+}
diff --git a/src/platform/src/Bridge/Groq/Llama/ModelClient.php b/src/platform/src/Bridge/Groq/Llama/ModelClient.php
new file mode 100644
index 00000000..84a6f0cc
--- /dev/null
+++ b/src/platform/src/Bridge/Groq/Llama/ModelClient.php
@@ -0,0 +1,52 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Bridge\Groq\Llama;
+
+use Symfony\AI\Platform\Bridge\Groq\Llama;
+use Symfony\AI\Platform\Model;
+use Symfony\AI\Platform\ModelClientInterface as PlatformResponseFactory;
+use Symfony\Component\HttpClient\EventSourceHttpClient;
+use Symfony\Contracts\HttpClient\HttpClientInterface;
+use Symfony\Contracts\HttpClient\ResponseInterface;
+use Webmozart\Assert\Assert;
+
+/**
+ * @author Christopher Hertel
+ * @author Dave Hulbert
+ */
+final readonly class ModelClient implements PlatformResponseFactory
+{
+    private EventSourceHttpClient $httpClient;
+
+    public function __construct(
+        HttpClientInterface $httpClient,
+        #[\SensitiveParameter]
+        private string $apiKey,
+    ) {
+        $this->httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient);
+        Assert::stringNotEmpty($apiKey, 'The API key must not be empty.');
+        Assert::startsWith($apiKey, 'gsk_', 'The API key must start with "gsk_".');
+    }
+
+    public function supports(Model $model): bool
+    {
+        return $model instanceof Llama;
+    }
+
+    public function request(Model $model, array|string $payload, array $options = []): ResponseInterface
+    {
+        return $this->httpClient->request('POST', 'https://api.groq.com/openai/v1/chat/completions', [
+            'auth_bearer' => $this->apiKey,
+            'json' => array_merge($options, $payload),
+        ]);
+    }
+}
diff --git a/src/platform/src/Bridge/Groq/Llama/ResponseConverter.php b/src/platform/src/Bridge/Groq/Llama/ResponseConverter.php
new file mode 100644
index 00000000..3daad0af
--- /dev/null
+++ b/src/platform/src/Bridge/Groq/Llama/ResponseConverter.php
@@ -0,0 +1,204 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Bridge\Groq\Llama;
+
+use Symfony\AI\Platform\Bridge\Groq\Llama;
+use Symfony\AI\Platform\Exception\ContentFilterException;
+use Symfony\AI\Platform\Exception\RuntimeException;
+use Symfony\AI\Platform\Model;
+use Symfony\AI\Platform\Response\Choice;
+use Symfony\AI\Platform\Response\ChoiceResponse;
+use Symfony\AI\Platform\Response\ResponseInterface as LlmResponse;
+use Symfony\AI\Platform\Response\StreamResponse;
+use Symfony\AI\Platform\Response\TextResponse;
+use Symfony\AI\Platform\Response\ToolCall;
+use Symfony\AI\Platform\Response\ToolCallResponse;
+use Symfony\AI\Platform\ResponseConverterInterface as PlatformResponseConverter;
+use Symfony\Component\HttpClient\Chunk\ServerSentEvent;
+use Symfony\Component\HttpClient\EventSourceHttpClient;
+use Symfony\Component\HttpClient\Exception\JsonException;
+use Symfony\Contracts\HttpClient\Exception\ClientExceptionInterface;
+use Symfony\Contracts\HttpClient\ResponseInterface as HttpResponse;
+
+/**
+ * @author Christopher Hertel
+ * @author Dave Hulbert
+ */
+final class ResponseConverter implements PlatformResponseConverter
+{
+    public function supports(Model $model): bool
+    {
+        return $model instanceof Llama;
+    }
+
+    public function convert(HttpResponse $response, array $options = []): LlmResponse
+    {
+        if ($options['stream'] ?? false) {
+            return new StreamResponse($this->convertStream($response));
+        }
+
+        try {
+            $data = $response->toArray();
+        } catch (ClientExceptionInterface $e) {
+            $data = $response->toArray(throw: false);
+
+            if (isset($data['error']['code']) && 'content_filter' === $data['error']['code']) {
+                throw new ContentFilterException(message: $data['error']['message'], previous: $e);
+            }
+
+            throw $e;
+        }
+
+        if (!isset($data['choices'])) {
+            throw new RuntimeException('Response does not contain choices');
+        }
+
+        /** @var Choice[] $choices */
+        $choices = array_map($this->convertChoice(...), $data['choices']);
+
+        if (1 !== \count($choices)) {
+            return new ChoiceResponse(...$choices);
+        }
+
+        if ($choices[0]->hasToolCall()) {
+            return new ToolCallResponse(...$choices[0]->getToolCalls());
+        }
+
+        return new TextResponse($choices[0]->getContent());
+    }
+
+    private function convertStream(HttpResponse $response): \Generator
+    {
+        $toolCalls = [];
+        foreach ((new EventSourceHttpClient())->stream($response) as $chunk) {
+            if (!$chunk instanceof ServerSentEvent || '[DONE]' === $chunk->getData()) {
+                continue;
+            }
+
+            try {
+                $data = $chunk->getArrayData();
+            } catch (JsonException) {
+                // try catch only needed for Symfony 6.4
+                continue;
+            }
+
+            if ($this->streamIsToolCall($data)) {
+                $toolCalls = $this->convertStreamToToolCalls($toolCalls, $data);
+            }
+
+            if ([] !== $toolCalls && $this->isToolCallsStreamFinished($data)) {
+                yield new ToolCallResponse(...array_map($this->convertToolCall(...), $toolCalls));
+            }
+
+            if (!isset($data['choices'][0]['delta']['content'])) {
+                continue;
+            }
+
+            yield $data['choices'][0]['delta']['content'];
+        }
+    }
+
+    /**
+     * @param array<int, array<string, mixed>> $toolCalls
+     * @param array<string, mixed>             $data
+     *
+     * @return array<int, array<string, mixed>>
+     */
+    private function convertStreamToToolCalls(array $toolCalls, array $data): array
+    {
+        if (!isset($data['choices'][0]['delta']['tool_calls'])) {
+            return $toolCalls;
+        }
+
+        foreach ($data['choices'][0]['delta']['tool_calls'] as $i => $toolCall) {
+            if (isset($toolCall['id'])) {
+                // initialize tool call
+                $toolCalls[$i] = [
+                    'id' => $toolCall['id'],
+                    'function' => $toolCall['function'],
+                ];
+                continue;
+            }
+
+            // add arguments delta to tool call
+            $toolCalls[$i]['function']['arguments'] .= $toolCall['function']['arguments'];
+        }
+
+        return $toolCalls;
+    }
+
+    /**
+     * @param array<string, mixed> $data
+     */
+    private function streamIsToolCall(array $data): bool
+    {
+        return isset($data['choices'][0]['delta']['tool_calls']);
+    }
+
+    /**
+     * @param array<string, mixed> $data
+     */
+    private function isToolCallsStreamFinished(array $data): bool
+    {
+        return isset($data['choices'][0]['finish_reason']) && 'tool_calls' === $data['choices'][0]['finish_reason'];
+    }
+
+    /**
+     * @param array{
+     *     index: integer,
+     *     message: array{
+     *         role: 'assistant',
+     *         content: ?string,
+     *         tool_calls: array{
+     *             id: string,
+     *             type: 'function',
+     *             function: array{
+     *                 name: string,
+     *                 arguments: string
+     *             },
+     *         },
+     *         refusal: ?mixed
+     *     },
+     *     logprobs: string,
+     *     finish_reason: 'stop'|'length'|'tool_calls'|'content_filter',
+     * } $choice
+     */
+    private function convertChoice(array $choice): Choice
+    {
+        if ('tool_calls' === $choice['finish_reason']) {
+            return new Choice(toolCalls: array_map([$this, 'convertToolCall'], $choice['message']['tool_calls']));
+        }
+
+        if (\in_array($choice['finish_reason'], ['stop', 'length'], true)) {
+            return new Choice($choice['message']['content']);
+        }
+
+        throw new RuntimeException(\sprintf('Unsupported finish reason "%s".', $choice['finish_reason']));
+    }
+
+    /**
+     * @param array{
+     *     id: string,
+     *     type: 'function',
+     *     function: array{
+     *         name: string,
+     *         arguments: string
+     *     }
+     * } $toolCall
+     */
+    private function convertToolCall(array $toolCall): ToolCall
+    {
+        $arguments = json_decode($toolCall['function']['arguments'], true, 512, \JSON_THROW_ON_ERROR);
+
+        return new ToolCall($toolCall['id'], $toolCall['function']['name'], $arguments);
+    }
+}
diff --git a/src/platform/src/Bridge/Groq/PlatformFactory.php b/src/platform/src/Bridge/Groq/PlatformFactory.php
new file mode 100644
index 00000000..f70a0c9a
--- /dev/null
+++ b/src/platform/src/Bridge/Groq/PlatformFactory.php
@@ -0,0 +1,45 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Bridge\Groq;
+
+use Symfony\AI\Platform\Bridge\Groq\Llama\ModelClient as LlamaModelClient;
+use Symfony\AI\Platform\Bridge\Groq\Llama\ResponseConverter as LlamaResponseConverter;
+use Symfony\AI\Platform\Contract;
+use Symfony\AI\Platform\Platform;
+use Symfony\Component\HttpClient\EventSourceHttpClient;
+use Symfony\Contracts\HttpClient\HttpClientInterface;
+
+/**
+ * @author Christopher Hertel
+ * @author Dave Hulbert
+ */
+final readonly class PlatformFactory
+{
+    public static function create(
+        #[\SensitiveParameter]
+        string $apiKey,
+        ?HttpClientInterface $httpClient = null,
+        ?Contract $contract = null,
+    ): Platform {
+        $httpClient = $httpClient instanceof EventSourceHttpClient ? $httpClient : new EventSourceHttpClient($httpClient);
+
+        return new Platform(
+            [
+                new LlamaModelClient($httpClient, $apiKey),
+            ],
+            [
+                new LlamaResponseConverter(),
+            ],
+            $contract ?? Contract::create(),
+        );
+    }
+}
diff --git a/src/platform/tests/Bridge/Groq/LlamaTest.php b/src/platform/tests/Bridge/Groq/LlamaTest.php
new file mode 100644
index 00000000..1d6a8d9e
--- /dev/null
+++ b/src/platform/tests/Bridge/Groq/LlamaTest.php
@@ -0,0 +1,74 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Tests\Bridge\Groq;
+
+use PHPUnit\Framework\TestCase;
+use Symfony\AI\Platform\Bridge\Groq\Llama;
+use Symfony\AI\Platform\Bridge\Groq\Llama\ModelClient;
+use Symfony\AI\Platform\Bridge\Groq\Llama\ResponseConverter;
+use Symfony\AI\Platform\Capability;
+use Symfony\AI\Platform\Model;
+use Symfony\Component\HttpClient\MockHttpClient;
+use Symfony\Component\HttpClient\Response\MockResponse;
+
+final class LlamaTest extends TestCase
+{
+    /**
+     * @covers \Symfony\AI\Platform\Bridge\Groq\Llama
+     */
+    public function testModelSupportsExpectedCapabilities(): void
+    {
+        $model = new Llama();
+
+        $this->assertTrue($model->supports(Capability::INPUT_MESSAGES));
+        $this->assertTrue($model->supports(Capability::OUTPUT_TEXT));
+        $this->assertTrue($model->supports(Capability::OUTPUT_STREAMING));
+        $this->assertTrue($model->supports(Capability::TOOL_CALLING));
+    }
+
+    /**
+     * @covers \Symfony\AI\Platform\Bridge\Groq\Llama\ModelClient
+     */
+    public function testModelClientSupportsLlamaModel(): void
+    {
+        $modelClient = new ModelClient(new MockHttpClient(), 'gsk_test');
+
+        $this->assertTrue($modelClient->supports(new Llama()));
+        $this->assertFalse($modelClient->supports(new class('test') extends Model {}));
+    }
+
+    /**
+     * @covers \Symfony\AI\Platform\Bridge\Groq\Llama\ResponseConverter
+     */
+    public function testResponseConverterSupportsLlamaModel(): void
+    {
+        $responseConverter = new ResponseConverter();
+
+        $this->assertTrue($responseConverter->supports(new Llama()));
+        $this->assertFalse($responseConverter->supports(new class('test') extends Model {}));
+    }
+
+    /**
+     * @covers \Symfony\AI\Platform\Bridge\Groq\Llama\ModelClient
+     * @covers \Symfony\AI\Platform\Bridge\Groq\Llama\ResponseConverter
+     */
+    public function testModelClientRequest(): void
+    {
+        $httpClient = new MockHttpClient(new MockResponse('{ "choices": [{"message": {"content": "Hello!"}, "finish_reason": "stop"}]}'));
+        $modelClient = new ModelClient($httpClient, 'gsk_test');
+        $model = new Llama();
+
+        $response = $modelClient->request($model, ['messages' => [['role' => 'user', 'content' => 'Hello']]], []);
+
+        $this->assertSame('Hello!', (new ResponseConverter())->convert($response)->getContent());
+    }
+}
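
Note on configuration for the example above: chat.php reads the key from $_SERVER['GROQ_API_KEY'] after loading the examples/.env file through Dotenv, and the ModelClient asserts that Groq keys start with "gsk_". A minimal sketch of the expected entry (the exact file name is an assumption; loadEnv() also picks up a local override file):

    # examples/.env or examples/.env.local (assumed location, matching the loadEnv() call in chat.php)
    GROQ_API_KEY=gsk_your_key_here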
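
Because the Llama model declares Capability::OUTPUT_STREAMING and the ResponseConverter wraps streamed chunks in a StreamResponse, a streaming variant of the example only needs the stream option. This is a minimal sketch, not part of the patch, assuming the Agent forwards call options to the platform and that StreamResponse::getContent() returns the generator built by convertStream():

    <?php
    // streaming variant of examples/groq/chat.php (sketch)
    use Symfony\AI\Agent\Agent;
    use Symfony\AI\Platform\Bridge\Groq\Llama;
    use Symfony\AI\Platform\Bridge\Groq\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    $platform = PlatformFactory::create($_SERVER['GROQ_API_KEY']);
    $agent = new Agent($platform, new Llama(Llama::LLAMA3_8B));
    $messages = new MessageBag(Message::ofUser('What is the Symfony framework?'));

    // 'stream' => true makes the converter return a StreamResponse whose
    // getContent() yields the delta chunks as they arrive from Groq.
    $response = $agent->call($messages, ['stream' => true]);

    foreach ($response->getContent() as $chunk) {
        echo $chunk;
    }
    echo \PHP_EOL;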
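
PlatformFactory::create() also accepts a pre-configured HTTP client and a custom Contract, and it wraps whatever client it receives in an EventSourceHttpClient when needed. A short sketch using Symfony's standard RetryableHttpClient decorator (the retry setup is an illustration, not part of this patch):

    <?php
    use Symfony\AI\Platform\Bridge\Groq\PlatformFactory;
    use Symfony\Component\HttpClient\HttpClient;
    use Symfony\Component\HttpClient\RetryableHttpClient;

    // retry transient Groq errors a couple of times before giving up
    $httpClient = new RetryableHttpClient(HttpClient::create(), maxRetries: 2);
    $platform = PlatformFactory::create($_SERVER['GROQ_API_KEY'], $httpClient);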