Skip to content

Commit bc9cfcb

Browse files
集中处理环境变量 (Centralize environment variable handling)
1 parent d644ee5 commit bc9cfcb

File tree

1 file changed

+10
-3
lines changed

1 file changed

+10
-3
lines changed

examples/lightrag_api_openai_compatible_demo.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,13 @@
1818
# Configure working directory
1919
WORKING_DIR = os.environ.get("RAG_DIR", f"{DEFAULT_RAG_DIR}")
2020
print(f"WORKING_DIR: {WORKING_DIR}")
21+
LLM_MODEL = os.environ.get("LLM_MODEL", "gpt-4o-mini")
22+
print(f"LLM_MODEL: {LLM_MODEL}")
23+
EMBEDDING_MODEL = os.environ.get("EMBEDDING_MODEL", "text-embedding-3-large")
24+
print(f"EMBEDDING_MODEL: {EMBEDDING_MODEL}")
25+
EMBEDDING_MAX_TOKEN_SIZE = int(os.environ.get("EMBEDDING_MAX_TOKEN_SIZE", 8192))
26+
print(f"EMBEDDING_MAX_TOKEN_SIZE: {EMBEDDING_MAX_TOKEN_SIZE}")
27+
2128
if not os.path.exists(WORKING_DIR):
2229
os.mkdir(WORKING_DIR)
2330

@@ -29,7 +36,7 @@ async def llm_model_func(
2936
prompt, system_prompt=None, history_messages=[], **kwargs
3037
) -> str:
3138
return await openai_complete_if_cache(
32-
os.environ.get("LLM_MODEL", "gpt-4o-mini"),
39+
LLM_MODEL,
3340
prompt,
3441
system_prompt=system_prompt,
3542
history_messages=history_messages,
@@ -43,7 +50,7 @@ async def llm_model_func(
4350
async def embedding_func(texts: list[str]) -> np.ndarray:
4451
return await openai_embedding(
4552
texts,
46-
model=os.environ.get("EMBEDDING_MODEL", "text-embedding-3-large"),
53+
model=EMBEDDING_MODEL,
4754
)
4855

4956

@@ -60,7 +67,7 @@ async def get_embedding_dim():
6067
working_dir=WORKING_DIR,
6168
llm_model_func=llm_model_func,
6269
embedding_func=EmbeddingFunc(embedding_dim=asyncio.run(get_embedding_dim()),
63-
max_token_size=os.environ.get("EMBEDDING_MAX_TOKEN_SIZE", 8192),
70+
max_token_size=EMBEDDING_MAX_TOKEN_SIZE,
6471
func=embedding_func),
6572
)
6673

0 commit comments

Comments (0)