9 changed files with 207 additions and 54 deletions
@@ -1,12 +0,0 @@
# qBittorrent API Configuration
QBIT_HOST=http://localhost:8080
QBIT_USERNAME=admin
QBIT_PASSWORD=password

# OpenAI API Key (required for the LangChain agent)
OPENAI_API_KEY=sk-proj-Rs93xxxxxxxxxxxxxxxxxxxxxUnStmeSHj_gUiEfbGzaFeZf0rgdaQzllQmvcMy6o-SywA

# DuckDuckGo Search Configuration
DUCKDUCKGO_ENABLED=true
DUCKDUCKGO_MAX_RESULTS=5
OMDB_API_KEY=3b6bc268
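For context, a minimal sketch of how these variables might be consumed, assuming `python-dotenv` is installed and the file is saved as `.env` in the project root; `/api/v2/auth/login` is qBittorrent's standard WebUI login route:

```python
# Sketch only: load the .env values and open an authenticated qBittorrent session.
import os

import requests
from dotenv import load_dotenv

load_dotenv()  # reads QBIT_HOST, QBIT_USERNAME, QBIT_PASSWORD, etc. into the environment

qbit_host = os.getenv("QBIT_HOST", "http://localhost:8080")

session = requests.Session()
resp = session.post(
    f"{qbit_host}/api/v2/auth/login",
    data={
        "username": os.getenv("QBIT_USERNAME"),
        "password": os.getenv("QBIT_PASSWORD"),
    },
    headers={"Referer": qbit_host},  # some qBittorrent setups require a matching Referer
)
print(resp.text)  # "Ok." on success; the session cookie is kept for later API calls
```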
@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
-----END CERTIFICATE-----
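This appears to be the ISRG Root X1 (Let's Encrypt) root certificate checked in as a PEM file. A hedged sketch of how such a pinned CA bundle is typically consumed from Python; the file path and URL below are assumptions, since the diff view does not show the new file's name:

```python
# Sketch only: use the committed PEM as an explicit CA bundle for HTTPS requests.
# "certs/isrg_root_x1.pem" and the URL are placeholders, not names from the diff.
import requests

response = requests.get(
    "https://example.com",            # placeholder endpoint
    verify="certs/isrg_root_x1.pem",  # trust only chains rooted in this certificate
)
print(response.status_code)
```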
@ -0,0 +1,80 @@
import gradio as gr
from langchain.chat_models import init_chat_model
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from typing import List


# Initialize the Ollama chat model with streaming enabled
ollama_model_name = "qwen2.5-coder:14b"  # Change to your preferred local Ollama model
llm = init_chat_model(
    ollama_model_name,
    model_provider="ollama",
    temperature=0,
    streaming=True,  # Enable token streaming
)

# Store conversation history
conversation_history = []


def add_message_to_history(role: str, content: str):
    """Add a message to the conversation history."""
    if role == "human":
        conversation_history.append(HumanMessage(content=content))
    elif role == "ai":
        conversation_history.append(AIMessage(content=content))
    elif role == "system":
        conversation_history.append(SystemMessage(content=content))
    return conversation_history


# Initialize with a system message
add_message_to_history("system", "You are a helpful, friendly AI assistant.")


def stream_response(message: str, history: List[List[str]]):
    """Process the user message and stream the response."""
    # Add the user message to history
    add_message_to_history("human", message)

    # Stream the reply chunk by chunk
    response = ""
    for chunk in llm.stream(list(conversation_history)):
        # Extract content from each AIMessageChunk
        chunk_content = chunk.content if hasattr(chunk, "content") else str(chunk)
        response += chunk_content
        yield response

    # Add the complete AI response to history
    add_message_to_history("ai", response)


# Create the Gradio interface with streaming
with gr.Blocks() as demo:
    gr.Markdown("# Ollama Chatbot with Streaming")

    chatbot = gr.Chatbot(height=500)
    msg = gr.Textbox(placeholder="Type your message here...", container=False)
    clear = gr.Button("Clear Chat")

    def user(message, history):
        # Echo the user message into the chat immediately
        return "", history + [[message, None]]

    def bot(history):
        # Stream the reply for the last user message
        user_message = history[-1][0]
        history[-1][1] = ""  # Initialize the bot's response

        for response in stream_response(user_message, history):
            history[-1][1] = response
            yield history

    def clear_chat():
        # Reset the visible chat and the stored LangChain history (keep the system prompt)
        del conversation_history[1:]
        return None

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )

    clear.click(clear_chat, None, chatbot, queue=False)

if __name__ == "__main__":
    # Launch the Gradio interface
    demo.queue()
    demo.launch(share=False)  # Set share=True to create a public link
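For a quick sanity check of the streaming setup outside Gradio, a minimal sketch, assuming the Ollama daemon is running locally and the model has already been pulled (e.g. `ollama pull qwen2.5-coder:14b`):

```python
# Sketch only: stream a single reply to the terminal using the same model setup.
from langchain.chat_models import init_chat_model

llm = init_chat_model("qwen2.5-coder:14b", model_provider="ollama", temperature=0, streaming=True)

for chunk in llm.stream("Write a one-line Python hello world."):
    print(chunk.content, end="", flush=True)
print()
```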
@ -0,0 +1,36 @@
from langchain.tools.base import BaseTool
from langchain.callbacks.manager import CallbackManagerForToolRun
from typing import Optional
from langchain_community.tools import DuckDuckGoSearchRun


class MediaInfoSearchTool(BaseTool):
    name: str = "duckduckgo_search"
    description: str = '''Useful for searching the web with DuckDuckGo for information about \
movies and TV shows, actors, and directors. Use it only against imdb.com by adding the keyword "imdb" to the query to filter results.
Input should be a search query; the tool returns the relevant results.'''

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Perform a DuckDuckGo search."""
        try:
            search_tool = DuckDuckGoSearchRun()
            return search_tool.run(query)
        except Exception as e:
            return f"Error performing DuckDuckGo search: {str(e)}"


class MoviesAdviceSearchTool(BaseTool):
    name: str = "movies_advice_search"
    description: str = '''Useful for searching the web with DuckDuckGo for movie recommendations and content similar to a given title or plot.
Prefer a single well-known, trustworthy site of your choice; add a site keyword to the query (for example "reddit" for reddit.com) to restrict results to that site.
Input should be a search query; the tool returns the relevant results.'''

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Perform a DuckDuckGo search."""
        try:
            search_tool = DuckDuckGoSearchRun()
            return search_tool.run(query)
        except Exception as e:
            return f"Error performing DuckDuckGo search: {str(e)}"