Frontend options
LLM_MODEL_MAP = {'gpt4o': LLMModels.openai.gpt4o, 'gpt4o-mini': LLMModels.openai.gpt4o_mini, 'chatgpt': LLMModels.openai.chatgpt40, 'sonnet': LLMModels.anthropic.claude_sonnet, 'haiku': LLMModels.anthropic.claude_haiku, 'command-r': 'not_implemented', 'command-r+': 'not_implemented', 'gemini-flash': 'not_implemented', 'gemini-pro': 'not_implemented'} (module attribute)
LLM_PROVIDER_MAP = {'gpt4o': LLMProviders.OPENAI, 'gpt4o-mini': LLMProviders.OPENAI, 'chatgpt': LLMProviders.OPENAI, 'sonnet': LLMProviders.ANTHROPIC, 'haiku': LLMProviders.ANTHROPIC, 'command-r': LLMProviders.COHERE, 'command-r+': LLMProviders.COHERE, 'gemini-flash': LLMProviders.GOOGLE, 'gemini-pro': LLMProviders.GOOGLE} (module attribute)
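Both maps are keyed by the same frontend strings, so a single selection can be resolved to a provider/model pair in one step. The helper below is an illustrative sketch, not part of the module; resolve_llm_choice and UNAVAILABLE are names introduced here, and the maps are assumed to be in scope as defined above.

```python
# Hypothetical helper showing how the two maps could be used together
# when the frontend sends a model key. Not part of the documented module.
from typing import Tuple

UNAVAILABLE = "not_implemented"  # sentinel value used in LLM_MODEL_MAP above

def resolve_llm_choice(selected: str) -> Tuple[object, object]:
    """Return (provider, model) for a frontend key, or raise if unsupported."""
    try:
        provider = LLM_PROVIDER_MAP[selected]
        model = LLM_MODEL_MAP[selected]
    except KeyError:
        raise ValueError(f"Unknown model key: {selected!r}")
    if model == UNAVAILABLE:
        raise ValueError(f"Model {selected!r} is mapped but not implemented yet.")
    return provider, model
```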
logger = logging.getLogger(__name__) (module attribute)
CombinedCondenseSelection
Bases: Enum
A single control that sets several condense settings at once.
LARGE = 2 (class/instance attribute)
MEDIUM = 1 (class/instance attribute)
SMALL = 0 (class/instance attribute)
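The sketch below shows one way such a combined control can fan out to several condense settings. The preset numbers and the CONDENSE_PRESETS / apply_condense_selection names are assumptions introduced for illustration; only the enum members and values come from the source.

```python
from enum import Enum

class CombinedCondenseSelection(Enum):
    SMALL = 0
    MEDIUM = 1
    LARGE = 2

# Hypothetical presets keyed by the enum; the actual settings are managed by
# the state class documented below (see set_max_previous_messages, etc.).
CONDENSE_PRESETS = {
    CombinedCondenseSelection.SMALL: {"max_previous_messages": 4, "max_intermediate_steps": 2},
    CombinedCondenseSelection.MEDIUM: {"max_previous_messages": 10, "max_intermediate_steps": 5},
    CombinedCondenseSelection.LARGE: {"max_previous_messages": 25, "max_intermediate_steps": 10},
}

def apply_condense_selection(state, selection: CombinedCondenseSelection) -> None:
    """Copy the preset values onto any object with matching attribute names."""
    for name, value in CONDENSE_PRESETS[selection].items():
        setattr(state, name, value)
```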
OptionsBeingModifiedState
Bases: BeingModifiedStateBase, MyState
conf_id = rx.field(None) (class/instance attribute)
modifiable_values = OptionsModifiableValues() (class/instance attribute)
provider_and_model_value = 'gpt4o' (class/instance attribute)
create() (classmethod)
handle_llm_change(selected)
max_model_total_tokens()
on_load() (async)
set_additional_instructions_prompt(value)
set_additional_system_prompt(value)
set_combined_condense_selection(value)
set_max_intermediate_steps(value)
set_max_previous_messages(value)
set_max_total(value)
update(update) (async)
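A minimal Reflex-style sketch of what handle_llm_change could do with the maps above. This is not the project's class: it omits the real base classes (BeingModifiedStateBase, MyState) and the other fields, and error_message is an assumed name used only to show how an unsupported pick might be surfaced.

```python
# Illustrative sketch only; assumes LLM_MODEL_MAP is importable from the module.
import reflex as rx

class LLMSelectionSketch(rx.State):
    # Typed class attributes become Reflex state vars.
    provider_and_model_value: str = "gpt4o"
    error_message: str = ""  # assumed field, not in the documented class

    def handle_llm_change(self, selected: str):
        """Accept a frontend model key, rejecting unknown or unimplemented ones."""
        model = LLM_MODEL_MAP.get(selected)
        if model is None or model == "not_implemented":
            self.error_message = f"'{selected}' is not available yet."
            return
        self.error_message = ""
        self.provider_and_model_value = selected
```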
OptionsModifiableValues
Bases: ModifiableValuesBase