Skip to content

Commit 6949026

Browse files
committed
[FEAT] Groq provider
1 parent d4bead9 commit 6949026

File tree

9 files changed

+308
-0
lines changed

9 files changed

+308
-0
lines changed
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
{
2+
"api_key": ""
3+
}

edenai_apis/apis/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,5 +66,6 @@
6666
from .xai import XAiApi
6767
from .deepseek import DeepseekApi
6868
from .together_ai import TogetheraiApi
69+
from .groq import GroqApi
6970

7071
# THIS NEEDS TO BE DONE AUTOMATICALLY

edenai_apis/apis/groq/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from .groq_api import GroqApi

edenai_apis/apis/groq/errors.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
from edenai_apis.utils.exception import (
    ProviderErrorLists,
)

# No Groq-specific error message patterns are mapped yet; provider errors
# will fall through to the generic EdenAI error handling.
ERRORS: ProviderErrorLists = {}

edenai_apis/apis/groq/groq_api.py

Lines changed: 172 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,172 @@
1+
from typing import Any, Dict, List, Literal, Optional, Type, Union

import httpx
from openai import BaseModel

from edenai_apis.features import ProviderInterface, TextInterface
from edenai_apis.features.llm.chat.chat_dataclass import (
    ChatDataClass as LlmChatDataClass,
)
from edenai_apis.features.llm.llm_interface import LlmInterface
from edenai_apis.features.multimodal.chat.chat_dataclass import (
    ChatDataClass as ChatMultimodalDataClass,
)
from edenai_apis.features.multimodal.chat.chat_dataclass import (
    ChatMessageDataClass as ChatMultimodalMessageDataClass,
)
from edenai_apis.features.multimodal.chat.chat_dataclass import (
    StreamChat as StreamChatMultimodal,
)
from edenai_apis.features.text.chat.chat_dataclass import ChatDataClass, StreamChat
from edenai_apis.llmengine.llm_engine import LLMEngine
from edenai_apis.loaders.data_loader import ProviderDataEnum
from edenai_apis.loaders.loaders import load_provider
from edenai_apis.utils.types import ResponseType


class GroqApi(ProviderInterface, TextInterface, LlmInterface):
    """EdenAI provider implementation for Groq.

    All feature calls are delegated to an :class:`LLMEngine` configured with
    the Groq API key loaded from the provider settings.
    """

    provider_name = "groq"

    def __init__(self, api_keys: Optional[Dict] = None) -> None:
        """Load provider credentials and build the underlying LLM client.

        Args:
            api_keys: Optional override of the stored provider credentials.
                Defaults to ``None`` (was a mutable ``{}`` default, which is
                shared across calls — replaced by a ``None`` sentinel).
        """
        self.api_settings = load_provider(
            ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys or {}
        )
        self.llm_client = LLMEngine(
            provider_name=self.provider_name,
            provider_config={
                "api_key": self.api_settings.get("api_key"),
            },
        )

    def text__chat(
        self,
        text: str,
        chatbot_global_action: Optional[str] = None,
        previous_history: Optional[List[Dict[str, str]]] = None,
        temperature: float = 0.0,
        max_tokens: int = 25,
        model: Optional[str] = None,
        stream: bool = False,
        available_tools: Optional[List[dict]] = None,
        tool_choice: Literal["auto", "required", "none"] = "auto",
        tool_results: Optional[List[dict]] = None,
        **kwargs,
    ) -> ResponseType[Union[ChatDataClass, StreamChat]]:
        """Run a text chat completion through Groq.

        Delegates directly to :meth:`LLMEngine.chat`; see that method for the
        semantics of each parameter. Returns either a full ``ChatDataClass``
        response or a ``StreamChat`` generator when ``stream`` is True.
        """
        response = self.llm_client.chat(
            text=text,
            previous_history=previous_history,
            chatbot_global_action=chatbot_global_action,
            temperature=temperature,
            max_tokens=max_tokens,
            model=model,
            stream=stream,
            available_tools=available_tools,
            tool_choice=tool_choice,
            tool_results=tool_results,
            **kwargs,
        )
        return response

    def multimodal__chat(
        self,
        messages: List[ChatMultimodalMessageDataClass],
        chatbot_global_action: Optional[str],
        temperature: float = 0,
        max_tokens: int = 25,
        model: Optional[str] = None,
        stop_sequences: Optional[List[str]] = None,
        top_k: Optional[int] = None,
        top_p: Optional[int] = None,
        stream: bool = False,
        provider_params: Optional[dict] = None,
        response_format=None,
        **kwargs,
    ) -> ResponseType[Union[ChatMultimodalDataClass, StreamChatMultimodal]]:
        """Run a multimodal chat completion through Groq.

        Delegates to :meth:`LLMEngine.multimodal_chat`. NOTE(review):
        ``provider_params`` is accepted but not forwarded to the engine —
        kept as-is to preserve behavior; confirm whether it should be passed.
        """
        response = self.llm_client.multimodal_chat(
            messages=messages,
            chatbot_global_action=chatbot_global_action,
            temperature=temperature,
            max_tokens=max_tokens,
            model=model,
            stop_sequences=stop_sequences,
            top_k=top_k,
            top_p=top_p,
            stream=stream,
            response_format=response_format,
            **kwargs,
        )
        return response

    def llm__chat(
        self,
        messages: Optional[List] = None,
        model: Optional[str] = None,
        timeout: Optional[Union[float, str, httpx.Timeout]] = None,
        temperature: Optional[float] = None,
        top_p: Optional[float] = None,
        n: Optional[int] = None,
        stream: Optional[bool] = None,
        stream_options: Optional[dict] = None,
        stop: Optional[str] = None,
        stop_sequences: Optional[Any] = None,
        max_tokens: Optional[int] = None,
        presence_penalty: Optional[float] = None,
        frequency_penalty: Optional[float] = None,
        logit_bias: Optional[dict] = None,
        # openai v1.0+ new params
        response_format: Optional[
            Union[dict, Type[BaseModel]]
        ] = None,  # Structured outputs
        seed: Optional[int] = None,
        tools: Optional[List] = None,
        tool_choice: Optional[Union[str, dict]] = None,
        logprobs: Optional[bool] = None,
        top_logprobs: Optional[int] = None,
        parallel_tool_calls: Optional[bool] = None,
        deployment_id=None,
        extra_headers: Optional[dict] = None,
        # soon to be deprecated params by OpenAI -> This should be replaced by tools
        functions: Optional[List] = None,
        function_call: Optional[str] = None,
        base_url: Optional[str] = None,
        api_version: Optional[str] = None,
        api_key: Optional[str] = None,
        model_list: Optional[list] = None,  # pass in a list of api_base,keys, etc.
        drop_invalid_params: bool = True,  # If true, all the invalid parameters will be ignored (dropped) before sending to the model
        user: Optional[str] = None,
        # Optional parameters
        **kwargs,
    ) -> LlmChatDataClass:
        """OpenAI-compatible chat completion endpoint for Groq.

        Forwards every parameter unchanged to :meth:`LLMEngine.completion`.
        Fixes applied relative to the original: ``messages`` no longer uses a
        mutable ``[]`` default, ``stop_sequences`` is annotated with
        ``typing.Any`` (the original used the builtin ``any`` function, which
        is not a type), and the return annotation explicitly names the llm
        ``ChatDataClass`` instead of relying on a shadowing import.
        """
        response = self.llm_client.completion(
            messages=messages or [],
            model=model,
            timeout=timeout,
            temperature=temperature,
            top_p=top_p,
            n=n,
            stream=stream,
            stream_options=stream_options,
            stop=stop,
            stop_sequences=stop_sequences,
            max_tokens=max_tokens,
            presence_penalty=presence_penalty,
            frequency_penalty=frequency_penalty,
            logit_bias=logit_bias,
            response_format=response_format,
            seed=seed,
            tools=tools,
            tool_choice=tool_choice,
            logprobs=logprobs,
            top_logprobs=top_logprobs,
            parallel_tool_calls=parallel_tool_calls,
            deployment_id=deployment_id,
            extra_headers=extra_headers,
            functions=functions,
            function_call=function_call,
            base_url=base_url,
            api_version=api_version,
            api_key=api_key,
            model_list=model_list,
            drop_invalid_params=drop_invalid_params,
            user=user,
            **kwargs,
        )
        return response

edenai_apis/apis/groq/info.json

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
{
2+
"text": {
3+
"chat": {
4+
"version": "v1"
5+
}
6+
},
7+
"multimodal": {
8+
"chat": {
9+
"version": "v1"
10+
}
11+
},
12+
"llm": {
13+
"chat": {
14+
"version": "v1"
15+
}
16+
}
17+
}
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
{
2+
"id": "chatcmpl-B71qqu4Y7m1ZVuF5YGqxz7LXzgf0Y",
3+
"created": 1741015112,
4+
"model": "gpt-4o-mini-2024-07-18",
5+
"object": "chat.completion",
6+
"system_fingerprint": "fp_7fcd609668",
7+
"choices": [
8+
{
9+
"finish_reason": "stop",
10+
"index": 0,
11+
"message": {
12+
"content": "Arrr, matey! What ye be seein’ in this here image is a grand pathway, made of wooden planks, weavin' its way through a lush and green landscape. The verdant grass sways in the gentle breeze, and the sky above be a brilliant blue, decorated with fluffy white clouds. Ye can spot trees and bushes on either side, makin' it a perfect setting for a stroll amongst nature. A peaceful place for a pirate at heart, aye!",
13+
"role": "assistant",
14+
"tool_calls": null,
15+
"function_call": null
16+
}
17+
}
18+
],
19+
"provider_time": 3692885792,
20+
"edenai_time": null,
21+
"usage": {
22+
"completion_tokens": 99,
23+
"prompt_tokens": 1170,
24+
"total_tokens": 1269,
25+
"completion_tokens_details": {
26+
"accepted_prediction_tokens": 0,
27+
"audio_tokens": 0,
28+
"reasoning_tokens": 0,
29+
"rejected_prediction_tokens": 0,
30+
"text_tokens": 99
31+
},
32+
"prompt_tokens_details": {
33+
"audio_tokens": 0,
34+
"cached_tokens": 1024,
35+
"text_tokens": null,
36+
"image_tokens": null
37+
}
38+
},
39+
"service_tier": "default",
40+
"cost": 0.0002349
41+
}
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
{
2+
"original_response": {
3+
"id": "chatcmpl-f271ab8f-95c8-4c86-8673-48ef9f63a5d1",
4+
"created": 1742897322,
5+
"model": "groq/llama-3.1-8b-instant",
6+
"object": "chat.completion",
7+
"system_fingerprint": "fp_19ca34f5ed",
8+
"choices": [
9+
{
10+
"finish_reason": "stop",
11+
"index": 0,
12+
"message": {
13+
"content": "barack obama, american, politician, democratic, african, united states, senator, illinois",
14+
"role": "assistant",
15+
"tool_calls": null,
16+
"function_call": null
17+
}
18+
}
19+
],
20+
"provider_time": 295782124,
21+
"edenai_time": null,
22+
"usage": {
23+
"completion_tokens": 21,
24+
"prompt_tokens": 248,
25+
"total_tokens": 269,
26+
"completion_tokens_details": null,
27+
"prompt_tokens_details": null,
28+
"queue_time": 0.094506039,
29+
"prompt_time": 0.007630602,
30+
"completion_time": 0.028,
31+
"total_time": 0.035630602
32+
},
33+
"x_groq": {
34+
"id": "req_01jq6cd6qbe1b9czbz5xxjkfq4"
35+
},
36+
"cost": 1.408e-05
37+
},
38+
"standardized_response": {
39+
"generated_text": "barack obama, american, politician, democratic, african, united states, senator, illinois",
40+
"message": [
41+
{
42+
"role": "user",
43+
"message": "Barack Hussein Obama is an American politician who served as the 44th president of the United States from 2009 to 2017. A member of the Democratic Party, Obama was the first African-American president of the United States. He previously served as a U.S. senator from Illinois from 2005 to 2008 and as an Illinois state senator from 1997 to 2004.",
44+
"tools": null,
45+
"tool_calls": null
46+
},
47+
{
48+
"role": "assistant",
49+
"message": "barack obama, american, politician, democratic, african, united states, senator, illinois",
50+
"tools": null,
51+
"tool_calls": []
52+
}
53+
]
54+
},
55+
"usage": {
56+
"completion_tokens": 21,
57+
"prompt_tokens": 248,
58+
"total_tokens": 269,
59+
"completion_tokens_details": null,
60+
"prompt_tokens_details": null,
61+
"queue_time": 0.094506039,
62+
"prompt_time": 0.007630602,
63+
"completion_time": 0.028,
64+
"total_time": 0.035630602
65+
},
66+
"cost": null
67+
}

edenai_apis/features/text/chat/chat_args.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ def chat_arguments(provider_name: str):
2828
"deepseek": "DeepSeek-V3",
2929
"microsoft": "gpt-4o-mini",
3030
"togetherai": "microsoft/WizardLM-2-8x22B",
31+
"groq": "llama-3.1-8b-instant",
3132
},
3233
"stream": False,
3334
}

0 commit comments

Comments
 (0)