forked from truefoundry/cognita
-
Notifications
You must be signed in to change notification settings - Fork 0
/
env.local.example
32 lines (23 loc) · 954 Bytes
/
env.local.example
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
# For local setup
METADATA_STORE_CONFIG='{"provider":"local","config":{"path":"local.metadata.yaml"}}'
VECTOR_DB_CONFIG='{"provider":"qdrant","local":"true"}'
DEBUG_MODE=true
LOG_LEVEL="DEBUG"
LOCAL=true
# The repository offers the following categories of LLM providers to choose from:
# 1. OpenAI-compatible LLMs and embeddings, enabled by setting `OPENAI_API_KEY`.
# OR
# 2. The Truefoundry LLM Gateway, which requires setting `TFY_API_KEY`, `TFY_LLM_GATEWAY_URL` and `TFY_HOST`.
# OR
# 3. Entirely open-source LLMs via [Ollama](https://ollama.com/library).
#    Set `OLLAMA_URL` only if Ollama is hosted at an endpoint other than the default below.
# If you have access to OpenAI models
OPENAI_API_KEY=""
# If Ollama is installed on the system
OLLAMA_URL="http://localhost:11434"
# Truefoundry API key
TFY_API_KEY=""
# Truefoundry tenant URL
TFY_HOST=""
# Truefoundry LLM Gateway URL
TFY_LLM_GATEWAY_URL=""