-
Notifications
You must be signed in to change notification settings - Fork 3.8k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: Add LM Studio Model and Embeddings Component #4021
Open
EDLLT
wants to merge
1
commit into
langflow-ai:main
Choose a base branch
from
EDLLT:lmstudio
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
+416
−0
Open
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
86 changes: 86 additions & 0 deletions
86
src/backend/base/langflow/components/embeddings/LMStudioEmbeddings.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,86 @@ | ||
from typing import Any | ||
from urllib.parse import urljoin | ||
|
||
import httpx | ||
|
||
from langflow.base.embeddings.model import LCEmbeddingsModel | ||
from langflow.field_typing import Embeddings | ||
from langflow.inputs.inputs import DropdownInput, SecretStrInput | ||
from langflow.io import FloatInput, MessageTextInput | ||
|
||
|
||
class LMStudioEmbeddingsComponent(LCEmbeddingsModel):
    """Langflow component that generates embeddings via a local LM Studio server.

    LM Studio exposes an OpenAI-compatible REST API; the available models are
    fetched from its ``/v1/models`` endpoint, and the embeddings themselves are
    produced through the ``NVIDIAEmbeddings`` client, which speaks the same
    protocol.
    """

    display_name: str = "LM Studio Embeddings"
    description: str = "Generate embeddings using LM Studio."
    icon = "LMStudio"

    inputs = [
        DropdownInput(
            name="model",
            display_name="Model",
            advanced=False,
            refresh_button=True,
        ),
        MessageTextInput(
            name="base_url",
            display_name="LM Studio Base URL",
            refresh_button=True,
            value="http://localhost:1234/v1",
        ),
        SecretStrInput(
            name="api_key",
            display_name="LM Studio API Key",
            advanced=True,
            value="LMSTUDIO_API_KEY",
        ),
        FloatInput(
            name="temperature",
            display_name="Model Temperature",
            value=0.1,
            advanced=True,
        ),
    ]

    def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:
        """Refresh the ``model`` dropdown options from the running LM Studio server.

        Called by Langflow when a field with ``refresh_button=True`` is
        refreshed; only the ``model`` field triggers a network round-trip.
        """
        if field_name == "model":
            base_url_dict = build_config.get("base_url", {})
            base_url_load_from_db = base_url_dict.get("load_from_db", False)
            base_url_value = base_url_dict.get("value")
            if base_url_load_from_db:
                # Resolve the stored variable name into its actual value.
                base_url_value = self.variables(base_url_value)
            elif not base_url_value:
                base_url_value = "http://localhost:1234/v1"
            build_config["model"]["options"] = self.get_model(base_url_value)

        return build_config

    def get_model(self, base_url_value: str) -> list[str]:
        """Return the ids of all models currently loaded in LM Studio.

        Raises:
            ValueError: If the server is unreachable or returns an unexpected
                payload.
        """
        # Build the models endpoint while preserving any path prefix in the
        # configured base URL. The previous urljoin(base, "/v1/models") form
        # discarded everything after the host, which broke deployments served
        # behind a reverse-proxy path prefix.
        base = base_url_value.rstrip("/")
        if not base.endswith("/v1"):
            base = f"{base}/v1"
        url = f"{base}/models"
        try:
            with httpx.Client() as client:
                response = client.get(url)
                response.raise_for_status()
                data = response.json()

            return [model["id"] for model in data.get("data", [])]
        except Exception as e:
            msg = "Could not retrieve models. Please, make sure the LM Studio server is running."
            raise ValueError(msg) from e

    def build_embeddings(self) -> Embeddings:
        """Instantiate the embeddings client pointed at the LM Studio server.

        Raises:
            ImportError: If ``langchain-nvidia-ai-endpoints`` is not installed.
            ValueError: If the client cannot be constructed.
        """
        try:
            from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings
        except ImportError as e:
            msg = "Please install langchain-nvidia-ai-endpoints to use LM Studio Embeddings."
            raise ImportError(msg) from e
        try:
            output = NVIDIAEmbeddings(
                model=self.model,
                base_url=self.base_url,
                # NOTE(review): temperature is unusual for an embeddings client;
                # confirm NVIDIAEmbeddings actually accepts this keyword.
                temperature=self.temperature,
                nvidia_api_key=self.api_key,
            )  # type: ignore
        except Exception as e:
            msg = f"Could not connect to LM Studio API. Error: {e}"
            raise ValueError(msg) from e
        return output
169 changes: 169 additions & 0 deletions
169
src/backend/base/langflow/components/models/LMStudioModel.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,169 @@ | ||
import operator | ||
from functools import reduce | ||
from typing import Any | ||
from urllib.parse import urljoin | ||
|
||
import httpx | ||
from langchain_openai import ChatOpenAI | ||
from pydantic.v1 import SecretStr | ||
|
||
from langflow.base.models.model import LCModelComponent | ||
from langflow.field_typing import LanguageModel | ||
from langflow.field_typing.range_spec import RangeSpec | ||
from langflow.inputs import ( | ||
BoolInput, | ||
DictInput, | ||
DropdownInput, | ||
FloatInput, | ||
IntInput, | ||
SecretStrInput, | ||
StrInput, | ||
) | ||
from langflow.inputs.inputs import HandleInput | ||
|
||
|
||
class LMStudioModelComponent(LCModelComponent):
    """Langflow component that generates text with a local LM Studio server.

    LM Studio exposes an OpenAI-compatible REST API, so this component talks
    to it through ``ChatOpenAI``; the available models are fetched from the
    server's ``/v1/models`` endpoint.
    """

    display_name = "LM Studio"
    description = "Generate text using LM Studio Local LLMs."
    icon = "LMStudio"
    name = "LMStudioModel"

    inputs = [
        *LCModelComponent._base_inputs,
        IntInput(
            name="max_tokens",
            display_name="Max Tokens",
            advanced=True,
            info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
            range_spec=RangeSpec(min=0, max=128000),
        ),
        DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True),
        BoolInput(
            name="json_mode",
            display_name="JSON Mode",
            advanced=True,
            info="If True, it will output JSON regardless of passing a schema.",
        ),
        DictInput(
            name="output_schema",
            is_list=True,
            display_name="Schema",
            advanced=True,
            info="The schema for the Output of the model. "
            "You must pass the word JSON in the prompt. "
            "If left blank, JSON mode will be disabled.",
        ),
        DropdownInput(
            name="model_name",
            display_name="Model Name",
            advanced=False,
            refresh_button=True,
        ),
        StrInput(
            name="base_url",
            display_name="Base URL",
            advanced=False,
            info="Endpoint of the LM Studio API. Defaults to 'http://localhost:1234/v1' if not specified.",
            value="http://localhost:1234/v1",
        ),
        SecretStrInput(
            name="api_key",
            display_name="LM Studio API Key",
            info="The LM Studio API Key to use for LM Studio.",
            advanced=True,
            value="LMSTUDIO_API_KEY",
        ),
        FloatInput(name="temperature", display_name="Temperature", value=0.1),
        IntInput(
            name="seed",
            display_name="Seed",
            info="The seed controls the reproducibility of the job.",
            advanced=True,
            value=1,
        ),
        HandleInput(
            name="output_parser",
            display_name="Output Parser",
            info="The parser to use to parse the output of the model",
            advanced=True,
            input_types=["OutputParser"],
        ),
    ]

    def update_build_config(self, build_config: dict, field_value: Any, field_name: str | None = None) -> dict:
        """Refresh the ``model_name`` dropdown options from the LM Studio server.

        Called by Langflow when a field with ``refresh_button=True`` is
        refreshed; only the ``model_name`` field triggers a network round-trip.
        """
        if field_name == "model_name":
            base_url_dict = build_config.get("base_url", {})
            base_url_load_from_db = base_url_dict.get("load_from_db", False)
            base_url_value = base_url_dict.get("value")
            if base_url_load_from_db:
                # Resolve the stored variable name into its actual value.
                base_url_value = self.variables(base_url_value)
            elif not base_url_value:
                base_url_value = "http://localhost:1234/v1"
            build_config["model_name"]["options"] = self.get_model(base_url_value)

        return build_config

    def get_model(self, base_url_value: str) -> list[str]:
        """Return the ids of all models currently loaded in LM Studio.

        Raises:
            ValueError: If the server is unreachable or returns an unexpected
                payload.
        """
        # Build the models endpoint while preserving any path prefix in the
        # configured base URL. The previous urljoin(base, "/v1/models") form
        # discarded everything after the host, which broke deployments served
        # behind a reverse-proxy path prefix.
        base = base_url_value.rstrip("/")
        if not base.endswith("/v1"):
            base = f"{base}/v1"
        url = f"{base}/models"
        try:
            with httpx.Client() as client:
                response = client.get(url)
                response.raise_for_status()
                data = response.json()

            return [model["id"] for model in data.get("data", [])]
        except Exception as e:
            msg = "Could not retrieve models. Please, make sure the LM Studio server is running."
            raise ValueError(msg) from e

    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
        """Build a ``ChatOpenAI`` client configured for the LM Studio endpoint."""
        # self.output_schema is a list of dictionaries; merge them into one dict.
        output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})
        lmstudio_api_key = self.api_key
        temperature = self.temperature
        model_name: str = self.model_name
        max_tokens = self.max_tokens
        model_kwargs = self.model_kwargs or {}
        base_url = self.base_url or "http://localhost:1234/v1"
        # Supplying a schema implicitly turns JSON mode on.
        json_mode = bool(output_schema_dict) or self.json_mode
        seed = self.seed

        api_key = SecretStr(lmstudio_api_key) if lmstudio_api_key else None
        output = ChatOpenAI(
            max_tokens=max_tokens or None,  # 0 in the UI means "unlimited"
            model_kwargs=model_kwargs,
            model=model_name,
            base_url=base_url,
            api_key=api_key,
            temperature=temperature if temperature is not None else 0.1,
            seed=seed,
        )
        if json_mode:
            if output_schema_dict:
                # NOTE(review): reviewers report LM Studio expects
                # method="json_schema" here — confirm before changing.
                output = output.with_structured_output(schema=output_schema_dict, method="json_mode")  # type: ignore
            else:
                output = output.bind(response_format={"type": "json_object"})  # type: ignore

        return output  # type: ignore

    def _get_exception_message(self, e: Exception):
        """Return the server-provided message from an LM Studio exception.

        Args:
            e: The exception to inspect.

        Returns:
            The message string embedded in the error body, or ``None`` when it
            cannot be extracted.
        """
        try:
            from openai import BadRequestError
        except ImportError:
            return None
        if isinstance(e, BadRequestError):
            # e.body is loosely typed by the openai client; guard against a
            # non-dict payload instead of assuming .get exists.
            body = e.body
            if isinstance(body, dict):
                message = body.get("message")
                if message:
                    return message
        return None
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,137 @@ | ||
const SvgLMStudio = (props) => ( | ||
<svg | ||
xmlns="http://www.w3.org/2000/svg" | ||
width="2em" | ||
height="2em" | ||
viewBox="0 0 271.68 271.44" | ||
> | ||
<path | ||
fill="#4b28be" | ||
fillRule="evenodd" | ||
stroke="#4b28be" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeWidth="0.372" | ||
d="M0 0H236.16V200.16H0z" | ||
transform="translate(16.8 33.6)" | ||
></path> | ||
<path | ||
fill="#4b28be" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M271.438 211.394c.352 6.937-.443 13.75-2.614 20.34-2.327 7.064-5.873 13.473-10.703 19.19-4.434 5.25-9.531 9.648-15.478 13.082-5.521 3.188-11.408 5.458-17.703 6.396-3.903.582-7.89.602-12.213.876-29.839-.003-59.304-.01-88.77-.008-21.55.001-43.101.017-64.651.026-7.198.248-14.2-.73-21.033-3.084-6.44-2.22-12.137-5.627-17.396-9.925-9.476-7.743-15.557-17.645-18.872-29.302-1.522-5.354-2.134-10.862-1.812-16.82 0-12.484-.02-24.595.016-36.706.117-40.516.255-81.032.385-121.549.783-10.437 3.87-20.055 9.783-28.786C15.52 17.53 22.195 11.63 30.125 7.279c5.944-3.26 12.392-5.275 19.239-6.146C56.162.268 62.942.018 69.772.023c47.72.033 95.44-.083 143.16.104 9.438.036 18.839 1.547 27.249 6.296 3.968 2.24 7.638 5.06 11.254 7.862 6.317 4.895 10.991 11.184 14.38 18.37 2.683 5.686 4.512 11.692 5.174 17.967.353 3.348.317 6.737.433 10.481a1974.9 1974.9 0 00-.05 13.548c-.002 27.216-.01 54.432.002 81.647.007 18.365.041 36.73.064 55.096m-71.57 14.931c.194.042.39.12.584.12 10.57.007 21.141.127 31.708-.058 3.554-.062 6.769-1.583 8.802-4.744 2.357-3.665 2.788-7.602.902-11.587-2.08-4.395-5.844-6.167-10.526-6.248-9.103-.156-18.208-.313-27.312-.383-1.36-.01-2.724.45-4.413.613-1.053-.116-2.106-.333-3.16-.333-16.836-.007-33.671.009-50.508.036-1.995.003-3.998.01-5.984.174-4.559.375-7.655 2.96-9.495 6.957-1.726 3.75-1.124 7.38 1.304 10.734 2.508 3.464 5.958 5.006 10.132 5.01 18.167.02 36.335.015 54.502-.009 1.053-.001 2.104-.246 3.463-.282m-34.406-30.618c2.127.055 4.255.161 6.381.156 8.972-.023 17.95.118 26.912-.19 6.22-.215 10.965-5.693 10.761-11.687-.204-6.01-5.264-10.889-11.478-10.993-9.575-.16-19.15-.296-28.725-.392-1.245-.012-2.493.271-4.112.372-1.262-.034-2.525-.096-3.788-.097-29.802-.005-59.605-.015-89.407.018-2.38.003-4.772.217-7.137.507-4.912.602-8.257 3.765-9.092 8.43-.968 5.404.976 10.397 5.434 12.382 2.399 1.069 5.24 1.586 7.884 1.595 30.866.106 61.733.07 92.6.059 1.126 0 2.252-.142 3.767-.16m-33.916-30.538c.26.053.52.152.78.153 10.903.013 21.812.23 32.707-.062 6.042-.161 11.023-6.043 
10.917-11.94-.113-6.298-5.103-11.248-11.906-11.3-9.574-.071-15.429.1-25.004.13-.848.004-5.776.003-6.995.075-.996-.038-1.992-.109-2.988-.109-31.332-.006-62.663-.006-93.995-.002-.93 0-1.866-.011-2.792.066-5.449.45-9.537 3.899-10.47 8.799-1.084 5.698.915 10.817 5.68 12.766 2.543 1.04 5.565 1.24 8.373 1.252 26.674.11 53.35.1 80.025.118 5.119.004 10.238-.03 15.668.054m34.378-55.225c-1.061-.055-2.122-.159-3.184-.159-31.267.01-62.533.028-93.8.056-1.194.001-2.402.024-3.58.196-5.91.867-9.494 4.807-9.831 10.733-.333 5.836 3.05 10.554 8.796 11.86 2.31.526 4.743.698 7.12.701 29.87.036 59.74.012 89.609-.002 1.063 0 2.127-.059 3.582-.028 6.047.066 12.093.162 18.14.187 4.985.021 9.975.097 14.953-.102 6.363-.254 12.095-5.974 12.102-11.894.005-4.857-3.841-11.69-11.479-11.67-10.689.03-21.378.116-32.428.122M144.75 48.498c-1.264-.023-2.528-.074-3.792-.065-7.121.048-14.243.227-21.363.143-11.378-.135-22.754-.462-34.132-.67-8.178-.15-16.358-.393-24.534-.304-5.435.059-10.961.107-16.276 1.086-9.787 1.804-10.814 11.04-6.7 17.956 2.134 3.586 5.693 4.878 9.896 4.87 26.092-.05 52.184.005 78.276-.002 6.587-.001 13.174-.098 20.15-.082 12.045.028 24.09.137 36.136.04 4.331-.035 7.48-3.111 7.992-7.463.278-2.367.357-4.785.232-7.165-.279-5.297-3.992-8.213-9.275-8.264-8.648-.085-15.617-.022-24.266-.04-3.514-.008-8.468-.182-12.344-.04M192.3 78.792c-.399-.018-.797-.052-1.196-.052-31.617.008-63.234.01-94.85.056-1.58.002-3.207.21-4.727.63-5.403 1.489-8.849 5.923-8.864 11.187-.016 5.283 3.31 9.742 8.615 11.108 1.961.505 4.084.52 6.133.524 27.624.044 55.247.052 82.87.054 4.258 0 8.515-.07 13.132-.043 10.369.102 20.738.214 31.107.287.99.007 1.986-.195 2.969-.361 4.645-.785 8.307-4.333 9.173-8.859 1.535-8.026-3.65-14.499-11.9-14.587-10.167-.108-20.337-.022-30.506-.013-.53 0-1.061.067-1.956.069z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M193.236 102.058c-4.259.037-8.516.106-12.774.106-27.624-.002-55.246-.01-82.87-.054-2.049-.003-4.172-.019-6.133-.524-5.305-1.366-8.63-5.825-8.615-11.108.015-5.265 3.461-9.698 8.864-11.187a18.09 18.09 0 014.726-.63c31.617-.046 63.234-.048 94.85-.056.4 0 2.227.485 1.453.177 2.11 1.044 3.568 2.207 4.52 3.715 3.834 4.268 4.287 10.821.555 15.43-.439 2.172-2.404 3.217-4.576 4.13z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M146.064 71.227c-6.586.052-13.173.149-19.76.15-26.092.007-52.184-.048-78.276.002-4.203.008-7.762-1.284-9.896-4.87-4.114-6.916-3.087-16.152 6.7-17.956 5.314-.98 10.84-1.027 16.276-1.086 8.176-.09 16.357.153 24.534.303 11.378.21 22.754.535 34.132.67 7.12.085 14.242-.094 21.363-.143a45.935 45.935 0 014.07.165c.973.482 1.663.877 2.367 1.244 8.685 4.53 9.57 14.355 1.344 19.628a87.625 87.625 0 00-2.854 1.893z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M164.423 133.103c-1.063.032-2.127.091-3.19.092-29.87.014-59.74.037-89.61.001-2.377-.003-4.81-.175-7.119-.7-5.745-1.307-9.129-6.025-8.796-11.86.337-5.927 3.92-9.867 9.83-10.733 1.179-.173 2.387-.196 3.58-.197 31.268-.028 62.535-.046 93.801-.056 1.061 0 3.563 1.319 4.867 1.516 1.527 1.392 1.41 1.39 2.646 2.686 4.339 4.545 4.02 11.574-.783 15.606-1.26 1.941-3.059 3.114-5.226 3.645z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M131.415 164.935c-5.12.016-10.239.048-15.358.045-26.675-.017-53.35-.008-80.025-.118-2.808-.012-5.83-.211-8.372-1.252-4.766-1.95-6.765-7.068-5.68-12.766.932-4.9 5.02-8.348 10.47-8.8.925-.076 1.86-.065 2.791-.065 31.332-.003 62.663-.004 93.995.002.996 0 1.992.071 3.247.246 1.877.977 3.464 2.02 4.353 3.411 5.006 5.198 3.583 13.637-2.365 17.73-1.037.715-2.017.855-3.056 1.567z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M165.253 195.515c-1.127.075-2.253.217-3.38.217-30.866.012-61.733.047-92.6-.059-2.644-.009-5.485-.526-7.883-1.595-4.458-1.985-6.402-6.978-5.434-12.383.835-4.664 4.18-7.828 9.092-8.429 2.365-.29 4.757-.504 7.137-.507 29.803-.034 59.605-.023 89.407-.018 1.263 0 2.526.063 4.063.218 1.513 1.115 2.83 2.027 3.974 3.118 5.934 5.657 4.87 13.972-2.249 18.003-.742.42-1.42.954-2.127 1.435z" | ||
></path> | ||
<path | ||
fill="#fff" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M199.74 226.094c-1.051.132-2.104.377-3.156.378-18.167.024-36.335.029-54.502.009-4.174-.005-7.624-1.545-10.132-5.01-2.428-3.354-3.03-6.983-1.304-10.734 1.84-3.997 4.936-6.582 9.495-6.957 1.986-.164 3.989-.171 5.984-.175 16.836-.026 33.673-.042 50.508-.035 1.054 0 2.197.487 3.506.74 1.067.716 1.8 1.007 2.596 1.603 6.008 4.498 6.166 12.992.323 17.66-1.085.867-2.21 1.682-3.317 2.521z" | ||
></path> | ||
<path | ||
fill="#5a5571" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M146.259 71.261c.616-.607 1.405-1.212 2.24-1.747 7.478-5.273 7.067-15.278-.925-19.808a44.945 44.945 0 01-2.187-1.233c3.418-.252 7.832-.047 11.346-.04 8.649.018 13.862-.075 22.511.01 3.706.208 8.706.029 12.83 5.729 1.731 2.468 1.595 9.188.06 11.657-3.51 4.838-6.914 5.331-9.545 5.506-12.044.097-24.09-.012-36.33-.074z" | ||
></path> | ||
<path | ||
fill="#5a5571" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M193.415 102.09c1.38-1.05 2.989-2.126 4.397-4.164 3.516-4.436 3.192-10.974-.555-15.43-1.006-1.353-3.001-2.791-5.058-3.908.457-.143 1.707 0 2.238 0 10.169-.01 20.338-.095 30.506.013 8.25.088 13.434 6.561 11.9 14.587-.866 4.526-4.528 8.074-9.173 8.859-.983.166-1.98.368-2.969.361-10.37-.073-20.738-.185-31.286-.318z" | ||
></path> | ||
<path | ||
fill="#595471" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M164.62 133.135c1.704-.782 3.556-1.875 5.03-3.677 4.802-4.032 5.12-11.06.782-15.606-1.384-2.276-5.855-3.918-6.875-4.234 10.626-.175 24.285.098 34.974.07 7.638-.02 11.484 6.812 11.479 11.668-.006 5.921-5.74 11.641-12.102 11.895-4.978.199-9.968.123-14.953.102-6.047-.025-12.093-.121-18.336-.218z" | ||
></path> | ||
<path | ||
fill="#595471" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M131.57 164.984c.893-.73 2.056-1.05 2.961-2.185 5.625-4.03 6.8-11.287 2.621-16.65-.617-1.56-4.183-3.575-6.149-4.253.775-.238 3.288-.117 4.136-.12 9.575-.031 19.15-.082 28.724-.01 6.803.05 12.153 5.12 12.266 11.42.106 5.896-4.875 11.778-10.917 11.94-10.895.29-21.804.074-32.707.06-.26 0-.52-.099-.936-.202z" | ||
></path> | ||
<path | ||
fill="#585170" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M165.447 195.543c.513-.51 1.528-.706 2.27-1.126 7.12-4.03 7.847-12.683 1.912-18.34-1.145-1.091-2.462-2.003-3.788-3.095 1.158-.245 2.406-.528 3.651-.516 9.575.096 19.15.232 28.725.392 6.214.104 11.274 4.982 11.478 10.993.204 5.994-4.542 11.473-10.76 11.686-8.963.309-17.941.168-26.913.19-2.127.006-4.254-.1-6.575-.184z" | ||
></path> | ||
<path | ||
fill="#554e6e" | ||
stroke="#000" | ||
strokeLinecap="square" | ||
strokeLinejoin="bevel" | ||
strokeOpacity="0" | ||
strokeWidth="0" | ||
d="M199.894 226.142c.953-.887 2.079-1.702 3.164-2.569 5.843-4.668 5.685-13.162-.323-17.66-.585-.878-4.018-2.084-5.245-2.328 1.27-.34 5.356-.306 6.716-.295 9.105.07 18.208.227 27.312.383 4.682.08 8.447 1.853 10.526 6.248 1.886 3.985 1.454 7.922-.902 11.587-2.033 3.16-5.248 4.682-8.802 4.744-10.567.185-21.138.065-31.708.058-.195 0-.39-.078-.738-.168z" | ||
></path> | ||
</svg> | ||
); | ||
export default SvgLMStudio; |
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I'm using it today (loving it, btw) and it seems it has to be json_schema. We need to update this (and the OpenAI component) to make this work better with BaseModel
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
By OpenAI component, you mean
OpenAIModel.py
right? If I understand correctly, all that needs to be done is to swap
method="json_mode"
for method="json_schema"
in both LMStudioModel.py
and OpenAIModel.py