Mirror of https://github.com/tcsenpai/agenticSeek.git (synced 2025-06-06 11:05:26 +00:00)

Feat: improvement for search efficiency

This commit is contained in:
parent e909d9d84c
commit 6b4b57e552
@@ -19,7 +19,7 @@ class BrowserAgent(Agent):
        self.browser.go_to("https://github.com/")
        self.search_history = []
        self.navigable_links = []
        self.ai_notes = []
        self.notes = []


    def extract_links(self, search_result: str):
@@ -42,13 +42,14 @@ class BrowserAgent(Agent):
    def get_unvisited_links(self):
        return "\n".join([f"[{i}] {link}" for i, link in enumerate(self.navigable_links) if link not in self.search_history])

    def make_newsearch_prompt(self, user_prompt: str, search_result: str):
    def make_newsearch_prompt(self, user_prompt: str, search_result: dict):
        search_choice = self.stringify_search_results(search_result)
        return f"""
        Based on the search result: {search_result}
        Based on the search result:
        {search_choice}
        Your goal is to find accurate and complete information to satisfy the user’s request.
        User request: {user_prompt}
        To proceed, choose a relevant link from the search results. Announce your choice by saying: "I want to navigate to <link>."
        For example: "I want to navigate to geohot.github.io."
        Do not explain your choice.
        """

@@ -67,9 +68,13 @@ class BrowserAgent(Agent):
        If no link seems appropriate, please say "GO_BACK".
        Remember, you seek the information the user wants.
        The user query was: {user_prompt}
        If you found a clear answer, please say "REQUEST_EXIT".
        You must choose a link to navigate to, go back or exit.
        Do not explain your choice.
        You must choose a link (write it down) to navigate to, or go back.
        For example you can say: I want to go to www.events.org/events
        Always end with a sentence that summarizes your finding, for example:
        Summary: According to https://karpathy.github.io/, LeCun's net is the earliest real-world application of a neural net.
        Another example:
        Summary: the BBC website does not provide useful information.
        Do not explain your choice, be short, concise.
        """

    def llm_decide(self, prompt):
@@ -82,41 +87,74 @@ class BrowserAgent(Agent):
        return answer, reasoning

    def select_unvisited(self, search_result):
        results_arr = search_result.split('\n\n')
        results_unvisited = []
        for res in results_arr:
            for his in self.search_history:
                if his not in res:
                    results_unvisited.append(res)
        for res in search_result:
            if res["link"] not in self.search_history:
                results_unvisited.append(res)
        return results_unvisited

    def jsonify_search_results(self, results_string):
        result_blocks = results_string.split("\n\n")
        parsed_results = []
        for block in result_blocks:
            if not block.strip():
                continue
            lines = block.split("\n")
            result_dict = {}
            for line in lines:
                if line.startswith("Title:"):
                    result_dict["title"] = line.replace("Title:", "").strip()
                elif line.startswith("Snippet:"):
                    result_dict["snippet"] = line.replace("Snippet:", "").strip()
                elif line.startswith("Link:"):
                    result_dict["link"] = line.replace("Link:", "").strip()
            if result_dict:
                parsed_results.append(result_dict)
        return parsed_results

    def stringify_search_results(self, results_arr):
        return '\n\n'.join([f"Link: {res['link']}" for res in results_arr])

    def save_notes(self, text):
        lines = text.split('\n')
        for line in lines:
            if "summary:" in line:
                self.notes.append(line)

    def process(self, user_prompt, speech_module) -> str:
        complete = False

        animate_thinking(f"Searching...", color="status")
        search_result = self.tools["web_search"].execute([user_prompt], False)
        search_result_raw = self.tools["web_search"].execute([user_prompt], False)
        search_result = self.jsonify_search_results(search_result_raw)
        prompt = self.make_newsearch_prompt(user_prompt, search_result)
        unvisited = [None]
        while not complete:
            answer, reasoning = self.llm_decide(prompt)
            self.save_notes(answer)
            if "REQUEST_EXIT" in answer:
                complete = True
                break
            links = self.extract_links(answer)
            if len(links) == 0 or "GO_BACK" in answer:
                search_result_unvisited = self.select_unvisited(search_result)
                prompt = self.make_newsearch_prompt(user_prompt, search_result)
                pretty_print("No links found, doing a new search.", color="warning")
                unvisited = self.select_unvisited(search_result)
                prompt = self.make_newsearch_prompt(user_prompt, unvisited)
                pretty_print(f"Going back to results. Still {len(unvisited)}", color="warning")
                links = []
                continue
            if len(unvisited) == 0:
                break
            animate_thinking(f"Navigating to {links[0]}", color="status")
            speech_module.speak(f"Navigating to {links[0]}")
            self.browser.go_to(links[0])
            page_text = self.browser.get_text()
            self.search_history.append(links[0])
            page_text = self.browser.get_text()
            self.navigable_links = self.browser.get_navigable()
            prompt = self.make_navigation_prompt(user_prompt, page_text)

        speech_module.speak(answer)
        self.browser.close()
        print("Final notes:", self.notes)
        return answer, reasoning


if __name__ == "__main__":
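For orientation, the following is a minimal standalone sketch of the result pipeline this commit introduces in BrowserAgent: raw web_search output is parsed into dicts, rendered back into a link list for the prompt, and filtered against the visit history. The helpers mirror the methods in the diff above but are pulled out of the class so the sketch runs on its own; the sample search string and the visited link are invented for illustration and are not part of the commit.

# Sketch only: module-level versions of the new BrowserAgent helpers, with
# invented sample data. Not part of the commit.

def jsonify_search_results(results_string):
    """Parse Title:/Snippet:/Link: blocks (separated by blank lines) into dicts."""
    parsed_results = []
    for block in results_string.split("\n\n"):
        if not block.strip():
            continue
        result_dict = {}
        for line in block.split("\n"):
            if line.startswith("Title:"):
                result_dict["title"] = line.replace("Title:", "").strip()
            elif line.startswith("Snippet:"):
                result_dict["snippet"] = line.replace("Snippet:", "").strip()
            elif line.startswith("Link:"):
                result_dict["link"] = line.replace("Link:", "").strip()
        if result_dict:
            parsed_results.append(result_dict)
    return parsed_results

def stringify_search_results(results_arr):
    """Render parsed results back into the link list shown to the LLM."""
    return "\n\n".join([f"Link: {res['link']}" for res in results_arr])

def select_unvisited(results_arr, search_history):
    """Keep only results whose link has not been visited yet."""
    return [res for res in results_arr if res["link"] not in search_history]

if __name__ == "__main__":
    raw = ("Title:Example\nSnippet:A sample page\nLink:https://example.org\n\n"
           "Title:Docs\nSnippet:Reference pages\nLink:https://example.org/docs")
    results = jsonify_search_results(raw)
    print(stringify_search_results(results))                    # two "Link: ..." blocks
    print(select_unvisited(results, ["https://example.org"]))   # only the /docs entry remains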
@@ -18,7 +18,7 @@ class CasualAgent(Agent):
            "file_finder": FileFinder(),
            "bash": BashInterpreter()
        }
        self.role = "talking, advices, events and philosophical"
        self.role = "casual talking"

    def process(self, prompt, speech_module) -> str:
        complete = False
@@ -91,7 +91,7 @@ class webSearch(Tools):
                title = result.get("title", "No title")
                snippet = result.get("snippet", "No snippet available")
                link = result.get("link", "No link available")
                results.append(f"Title: {title}\nSnippet: {snippet}\nLink: {link}")
                results.append(f"Title:{title}\nSnippet:{snippet}\nLink:{link}")
            return "\n\n".join(results)
        else:
            return "No results found for the query."
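The webSearch change above only tightens the result formatting (no space after the colon). As a quick illustration, again a sketch with invented data rather than part of the commit, the compact format still round-trips through the parsing shown earlier, because each value is stripped after the prefix is removed:

# Sketch only: invented sample results, demonstrating that the compact
# "Title:{title}" formatting still parses cleanly thanks to .strip().
results_raw = [
    {"title": "Example", "snippet": "A sample page", "link": "https://example.org"},
    {"snippet": "Entry with no title or link"},
]

blocks = []
for result in results_raw:
    title = result.get("title", "No title")
    snippet = result.get("snippet", "No snippet available")
    link = result.get("link", "No link available")
    blocks.append(f"Title:{title}\nSnippet:{snippet}\nLink:{link}")
output = "\n\n".join(blocks)

# Both the old spaced format and the new compact one yield the same parsed values.
for line in output.split("\n"):
    if line.startswith("Link:"):
        print(line.replace("Link:", "").strip())
# prints: https://example.org
#         No link available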