diff --git a/summarizer.py b/summarizer.py
index 0489919..7c6a1c5 100644
--- a/summarizer.py
+++ b/summarizer.py
@@ -45,7 +45,7 @@ def setup_summarization_chain():
         input_variables=["text"],
     )
 
-    llm = ChatOllama(model="llama3", base_url="http://127.0.0.1:11434")
+    llm = ChatOllama(model="llama3:instruct", base_url="http://127.0.0.1:11434")
     llm_chain = LLMChain(llm=llm, prompt=prompt_template)
     return llm_chain
 
diff --git a/translator.py b/translator.py
index 430432d..a3fcd5f 100644
--- a/translator.py
+++ b/translator.py
@@ -14,6 +14,6 @@ def setup_translator_chain():
         input_variables=["text"],
     )
 
-    llm = ChatOllama(model="llama3", base_url="http://127.0.0.1:11434")
+    llm = ChatOllama(model="llama3:instruct", base_url="http://127.0.0.1:11434")
     llm_chain = LLMChain(llm=llm, prompt=prompt_template)
     return llm_chain
diff --git a/yt_summarizer.py b/yt_summarizer.py
index aaa4032..b34c821 100644
--- a/yt_summarizer.py
+++ b/yt_summarizer.py
@@ -48,7 +48,7 @@ def yt_summarization_chain():
     DETAILED SUMMARY:""",
         input_variables=["text"],
     )
-    llm = ChatOllama(model="llama3", base_url="http://127.0.0.1:11434")
+    llm = ChatOllama(model="llama3:instruct", base_url="http://127.0.0.1:11434")
     summarize_chain = load_summarize_chain(
         llm=llm, prompt=prompt_template, verbose=True
     )
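
The same one-line model switch (from `llama3` to the instruction-tuned `llama3:instruct` tag) is repeated in summarizer.py, translator.py, and yt_summarizer.py. A minimal sketch of one way to avoid touching three files on the next model change, assuming the same `langchain_community` ChatOllama import these modules already use; the `get_llm` helper, module name, and constants below are illustrative and not part of this diff:

```python
# llm_factory.py (hypothetical module): one place to configure the Ollama model.
from langchain_community.chat_models import ChatOllama

OLLAMA_BASE_URL = "http://127.0.0.1:11434"
OLLAMA_MODEL = "llama3:instruct"  # instruction-tuned tag shared by all three chains


def get_llm() -> ChatOllama:
    """Build the shared ChatOllama client pointed at the local Ollama server."""
    return ChatOllama(model=OLLAMA_MODEL, base_url=OLLAMA_BASE_URL)
```

Each `setup_*_chain()` function could then call `get_llm()` instead of constructing `ChatOllama` inline, keeping any future model switch a single-file edit.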