AI from code
LLM
Пример работы с моделями
langchain
langchain-openai
langchain-deepseek
pydantic
pydantic-settings

from typing import Optional
import httpx
from functools import partial
from pydantic_settings import BaseSettings
from langchain.chat_models import init_chat_model
from langchain_deepseek import ChatDeepSeek
class _LLMConfig(BaseSettings):
    """Runtime settings read from the process environment / a local .env file."""
    # llm
    OPENAI_API_KEY: str  # required; passed as the API key to every model factory below
    HTTP_PROXY: Optional[str] = None  # optional proxy URL; when unset no proxy client is built

    class Config:
        env_file = '.env'  # values may also come from a .env file next to the script
        env_file_encoding = 'utf-8'
        extra = 'ignore'  # silently ignore unrelated environment variables
def init_model(base_url: str, model: str):
    """Create a chat model bound to an OpenAI-compatible endpoint.

    The API key is taken from the environment via ``_LLMConfig``.
    """
    settings = _LLMConfig()
    model_kwargs = {
        'base_url': base_url,
        'api_key': settings.OPENAI_API_KEY,
        'model': model,
        # Stick to the classic chat-completions endpoint.
        'use_responses_api': False,
    }
    return init_chat_model(**model_kwargs)
def _proxy_client(config: _LLMConfig):
if config.HTTP_PROXY:
return httpx.Client(
verify=False, # Disable SSL verification
proxy=config.HTTP_PROXY
)
return None
def init_deepseek_model(base_url: str, model: str):
    """Return a factory producing ``ChatDeepSeek`` instances.

    The returned callable accepts the remaining ``ChatDeepSeek`` keyword
    arguments (e.g. ``temperature``, ``max_tokens``) and shares the
    endpoint, credentials and optional proxy client configured here.
    """
    settings = _LLMConfig()
    preset = dict(
        api_base=base_url,
        api_key=settings.OPENAI_API_KEY,
        model=model,
        use_responses_api=False,
        # None when HTTP_PROXY is unset — ChatDeepSeek then uses its default transport.
        http_client=_proxy_client(settings),
    )
    return partial(ChatDeepSeek, **preset)
def main():
    """Smoke-test the DeepSeek factory with a single self-identification prompt."""
    deepseek_v3 = init_deepseek_model(
        base_url='https://deepseek/...',
        model='deepseek-v3-model-id-...'
    )
    # Fixed: the original literals were mojibake (UTF-8 Cyrillic decoded as
    # Greek/Latin); restored the intended Russian text.
    # "Which model version are you using?"
    prompt = 'Какую версию модели ты используешь?'
    model = deepseek_v3(temperature=0, max_tokens=1500)
    response = model.invoke(prompt)
    # "Model answer"
    print('Ответ модели', response)


if __name__ == '__main__':
    main()
ML
Lead projects
Python
Last updated