From 6f3fb4dce4e2d4ecd990f272aae9c2aa6823974e Mon Sep 17 00:00:00 2001 From: martin legrand Date: Mon, 17 Mar 2025 15:58:17 +0100 Subject: [PATCH] Fix: web navigation problems --- sources/agents/browser_agent.py | 35 +++++++++++++++++++-------------- sources/tools/searxSearch.py | 2 ++ 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/sources/agents/browser_agent.py b/sources/agents/browser_agent.py index be1a9ef..758a4dc 100644 --- a/sources/agents/browser_agent.py +++ b/sources/agents/browser_agent.py @@ -66,23 +66,25 @@ class BrowserAgent(Agent): You can navigate to these links: {remaining_links} + You must choose a link (write it down) to navigate to, or go back. + For example you can say: I want to go to www.wikipedia.org/cats + + Follow up with a summary of the page content (of the current page, not of the link), for example: + Summary: According to https://karpathy.github.io/ LeCun net is the earliest real-world application of a neural net" + The summary should include any findings that are useful in answering the user query. + If a website does not have useful information say Error, for example: + Error: This forum does not discuss anything that can answer the user query + Be short, concise, direct. + If no link seems appropriate, please say "GO_BACK". Remember, you seek the information the user want. The user query was : {user_prompt} - You must choose a link (write it down) to navigate to, or go back. - For exemple you can say: i want to go to www.wikipedia.org/cats - Always end with a sentence that summarize when useful information is found for exemple: - Summary: According to https://karpathy.github.io/ LeCun net is the earliest real-world application of a neural net" - Do not say "according to this page", always write down the whole link. - If a website does not have usefull information say Error, for exemple: - Error: This forum does not discus anything that can answer the user query - Do not explain your choice, be short, concise. 
""" def llm_decide(self, prompt): animate_thinking("Thinking...", color="status") self.memory.push('user', prompt) - answer, reasoning = self.llm_request(prompt) + answer, reasoning = self.llm_request() pretty_print("-"*100) pretty_print(answer, color="output") pretty_print("-"*100) @@ -120,17 +122,20 @@ class BrowserAgent(Agent): def save_notes(self, text): lines = text.split('\n') for line in lines: - if "summary:" in line.lower(): + if "summary" in line.lower(): self.notes.append(line) def conclude_prompt(self, user_query): - search_note = '\n -'.join(self.notes) + annotated_notes = [f"{i+1}: {note.lower().replace('summary:', '')}" for i, note in enumerate(self.notes)] + search_note = '\n'.join(annotated_notes) print("AI research notes:\n", search_note) return f""" - Following a web search about: + Following a human request: {user_query} - Write a conclusion based on these notes: + A web AI made the following finding across different pages: {search_note} + + Summarize the finding, and provide a conclusion that answer the request. 
""" def process(self, user_prompt, speech_module) -> str: @@ -138,8 +143,7 @@ class BrowserAgent(Agent): animate_thinking(f"Searching...", color="status") search_result_raw = self.tools["web_search"].execute([user_prompt], False) - search_result = self.jsonify_search_results(search_result_raw) - search_result = search_result[:5] # until futher improvement + search_result = self.jsonify_search_results(search_result_raw)[:5] # until futher improvement prompt = self.make_newsearch_prompt(user_prompt, search_result) unvisited = [None] while not complete: @@ -167,6 +171,7 @@ class BrowserAgent(Agent): self.browser.close() prompt = self.conclude_prompt(user_prompt) + self.memory.push('user', prompt) answer, reasoning = self.llm_request(prompt) pretty_print(answer, color="output") speech_module.speak(answer) diff --git a/sources/tools/searxSearch.py b/sources/tools/searxSearch.py index 4ed3110..9b8a0fb 100644 --- a/sources/tools/searxSearch.py +++ b/sources/tools/searxSearch.py @@ -90,6 +90,8 @@ class searxSearch(Tools): title = article.find('h3').text.strip() if article.find('h3') else "No Title" description = article.find('p', class_='content').text.strip() if article.find('p', class_='content') else "No Description" results.append(f"Title:{title}\nSnippet:{description}\nLink:{url}") + if len(results) == 0: + raise Exception("Searx search failed. did you run start_services.sh? Did docker die?") return "\n\n".join(results) # Return results as a single string, separated by newlines except requests.exceptions.RequestException as e: return f"Error during search: {str(e)}"