Moved GPT4all to chat plugin module

This commit is contained in:
kvan7 2024-01-22 03:24:31 +00:00
parent 11a2d26393
commit 3f9f8940ec
3 changed files with 56 additions and 57 deletions

View file

@ -1,4 +1,6 @@
from searx.search import SearchWithPlugins from searx.search import SearchWithPlugins
from pathlib import Path
from gpt4all import GPT4All
name = "Chat Plugin" name = "Chat Plugin"
@ -11,5 +13,29 @@ def post_search(request, search: SearchWithPlugins) -> None:
search_request = search.search_query search_request = search.search_query
container = search.result_container container = search.result_container
# container.infoboxes.append(container.infoboxes[0]) # container.infoboxes.append(container.infoboxes[0])
container.chat_box = [{'chat_box': 'hello world'}] container.chat_box = {'chat_box': 'GPT4All'}
container.chat_box[0]['content'] = 'some string that relates to your search query here' + f'\n{search_request.query}' container.chat_box['content'] = 'Generating response to query: ' + f'\n{search_request.query}'
def generate_chat_content(query):
    """Generate an HTML "info box" style answer for *query* using a local GPT4All model.

    Parameters:
        query: raw search query string to answer.

    Returns:
        The model's generated response as a string (the system prompt asks the
        model for HTML output, but the return value is whatever the model emits).
    """
    # Cache the loaded model on the function object: loading a GGUF model from
    # disk is expensive and must not be repeated for every search request.
    model = getattr(generate_chat_content, "_model", None)
    if model is None:
        model = GPT4All(
            model_name='gpt4all-falcon-q4_0.gguf',
            # Model file is expected to live alongside the plugins; relative to
            # the process CWD — assumes searx is started from the repo root.
            model_path=(Path.cwd() / 'searx' / 'plugins'),
            allow_download=False,  # never fetch model weights at request time
        )
        generate_chat_content._model = model
    system_template = """
### System Instructions:
1. Provide concise and directly relevant answers to the specific query in HTML format, emulating the style of an info box on a search engine.
2. Only use appropriate HTML tags (e.g., `<div>`, `<p>`, `<h1>`) to structure the response. Do not use markdown syntax or backticks(```) to format the response.
3. Directly address the query. For example, if the query is about a specific function or method in a programming language, focus on explaining and providing examples of that function or method.
4. Include practical examples or code snippets relevant to the query.
5. Keep definitions or explanations brief and specific, focusing only on aspects directly related to the query.
"""
    prompt_template = """
### Query:
{0}
### Expected Information Box:
"""
    # chat_session installs the system prompt and wraps each generate() call's
    # input in prompt_template for the duration of the context.
    with model.chat_session(system_template, prompt_template):
        response = model.generate(query, max_tokens=500, repeat_penalty=1.3)
    return str(response)

View file

@ -3,21 +3,24 @@
<p><bdi>{{ chat_box.content | safe }}</bdi></p> <p><bdi>{{ chat_box.content | safe }}</bdi></p>
</aside> </aside>
<script> <script>
document.addEventListener("DOMContentLoaded", function () { window.onload = function () {
function updateChatBox() { // Extract the 'q' parameter from the search URL in the sidebar
fetch('/get-chat-content') const searchUrl = document.querySelector('#search_url pre').textContent;
.then(response => response.json()) const url = new URL(searchUrl);
.then(data => { const query = url.searchParams.get('q');
const chatBox = document.querySelector('.chat_box');
console.log('response from api', data);
chatBox.querySelector('bdi').textContent = data.chat_box;
chatBox.querySelector('p').textContent = data.content;
})
.catch(error => console.error('Error:', error));
}
// Call updateChatBox after search is completed fetch('/generate-chat-content', {
// This might be tied to an event, depending on how your search functionality is implemented method: 'POST',
updateChatBox(); headers: {
}); 'Content-Type': 'application/json',
},
body: JSON.stringify({ query: query }),
})
.then(response => response.json())
.then(data => {
const chatBox = document.querySelector('.chat_box');
chatBox.querySelector('h2 bdi').style.display = 'none';
chatBox.querySelector('p bdi').innerHTML = data.content;
});
};
</script> </script>

View file

@ -96,7 +96,7 @@ from searx.utils import (
from searx.version import VERSION_STRING, GIT_URL, GIT_BRANCH from searx.version import VERSION_STRING, GIT_URL, GIT_BRANCH
from searx.query import RawTextQuery from searx.query import RawTextQuery
from searx.plugins import Plugin, plugins, initialize as plugin_initialize from searx.plugins import Plugin, plugins, initialize as plugin_initialize
import searx.plugins.chat import searx.plugins.chat as chat
from searx.plugins.oa_doi_rewrite import get_doi_resolver from searx.plugins.oa_doi_rewrite import get_doi_resolver
from searx.preferences import ( from searx.preferences import (
Preferences, Preferences,
@ -664,7 +664,7 @@ def search():
search_query, raw_text_query, _, _, selected_locale = get_search_query_from_webapp( search_query, raw_text_query, _, _, selected_locale = get_search_query_from_webapp(
request.preferences, request.form request.preferences, request.form
) )
if searx.plugins.chat in request.user_plugins: if chat in request.user_plugins:
extremely_bad_global_variable_search_query = raw_text_query.getQuery() extremely_bad_global_variable_search_query = raw_text_query.getQuery()
search = SearchWithPlugins(search_query, request.user_plugins, request) # pylint: disable=redefined-outer-name search = SearchWithPlugins(search_query, request.user_plugins, request) # pylint: disable=redefined-outer-name
result_container = search.search() result_container = search.search()
@ -1309,43 +1309,13 @@ def config():
} }
) )
@app.route('/get-chat-content') @app.route('/generate-chat-content', methods=['POST'])
def get_chat_content(): def generate_chat_content_endpoint():
global extremely_bad_global_variable_search_query if request.json is None:
# Retrieve chat content from wherever it's stored return jsonify({'content': ''})
# temp query = request.json.get('query')
chat_content = None chat_content = chat.generate_chat_content(query)
# retrieve from searx.plugins.chat return jsonify({'chat_box': 'GPT4ALL', 'content': chat_content})
if extremely_bad_global_variable_search_query:
# do task
chat_content = {'chat_box': 'hello world', 'content': 'Not Generated Response Yet'}
model = GPT4All(model_name='gpt4all-falcon-q4_0.gguf',
model_path=(Path.cwd() / 'searx' / 'plugins'),
allow_download=False)
system_template = """
### System Instructions:
1. Provide concise, accurate, and directly relevant answers to queries.
2. Emulate the style of a Google Answer Box, which includes straightforward and authoritative responses.
3. Where appropriate, use bullet points or structured formatting to clearly present information.
4. Avoid unnecessary elaborations and focus on the key points of the query.
5. Ensure the information is up-to-date and factually correct.
"""
prompt_template = """
### Query:
{0}
### Expected Response:
"""
with model.chat_session(system_template, prompt_template):
response = model.generate(extremely_bad_global_variable_search_query)
chat_content['content'] = str(response)
extremely_bad_global_variable_search_query = None
return jsonify(chat_content)
@app.errorhandler(404) @app.errorhandler(404)