Browse Source

refactor: reorganize tools and enhance functionality; remove .env-example

fine-tuned some system prompt
master
Matteo Benedetto 8 months ago
parent
commit
643ee44840
  1. 12
      .env-example
  2. 31
      .gradio/certificate.pem
  3. 10
      README.md
  4. 64
      main.py
  5. 3
      requirements.txt
  6. 80
      test.py
  7. 0
      tools/__init__.py
  8. 25
      tools/qbit.py
  9. 36
      tools/search.py

12
.env-example

@ -1,12 +0,0 @@
# qBittorrent API Configuration
QBIT_HOST=http://localhost:8080
QBIT_USERNAME=admin
QBIT_PASSWORD=password
# OpenAI API Key (required for the LangChain agent)
OPENAI_API_KEY=your-openai-api-key-here
# DuckDuckGo Search Configuration
DUCKDUCKGO_ENABLED=true
DUCKDUCKGO_MAX_RESULTS=5
OMDB_API_KEY=your-omdb-api-key-here

31
.gradio/certificate.pem

@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
-----END CERTIFICATE-----

10
README.md

@ -42,11 +42,17 @@ Or use the CLI interface by uncommenting the `cli_main()` line in `main.py`.
## Tools
The agent includes several tools:
The tools are organized into separate files for better modularity:
### qBittorrent Tools (`tools/qbit.py`)
- `get_downloads_list`: Get information about current downloads
- `qbittorrent_search`: Search for torrents using qBittorrent's search functionality
- `download_torrent`: Add a torrent to the download queue
- `ForcedDuckDuckGoSearch`: Search for information about media content
### Search Tools (`tools/search.py`)
- `duckduckgo_search`: Search the web using DuckDuckGo
- `media_info_search`: Find detailed information about movies, TV shows, or other media content
- `movies_advice_search`: Get recommendations or advice about movies
## License

64
main.py

@ -1,41 +1,44 @@
import os
from langchain.agents import Tool, initialize_agent, AgentType
from dotenv import load_dotenv
from tools import DownloadListTool, QBitSearchTool, DownloadTorrentTool
from langchain_community.tools import DuckDuckGoSearchRun
from tools.qbit import QbitDownloadListTool, QbitSearchTool, QbitDownloadTorrentTool
from tools.search import MediaInfoSearchTool, MoviesAdviceSearchTool
from langchain.memory import ConversationBufferMemory
from langchain.chat_models import init_chat_model
import gradio as gr
import datetime
# Load environment variables
load_dotenv()
def create_agent():
# Initialize the language model
#llm = init_chat_model("gpt-4.1-mini", model_provider="openai")
llm = init_chat_model("qwen2.5-coder:14b", model_provider="ollama", temperature=0)
# Create system message with current time and other info
current_time = datetime.datetime.now()
llm = init_chat_model("gpt-4o-mini", model_provider="openai")
# Initialize memory
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
# Initialize search tool
search_tool = DuckDuckGoSearchRun()
# Function to force DuckDuckGo for specific search types
def forced_duckduckgo_search(query: str) -> str:
"""Use DuckDuckGo to search for specific information."""
return search_tool.run(query)
memory = ConversationBufferMemory(
memory_key="chat_history",
return_messages=True,
human_prefix="User",
ai_prefix="Assistant"
)
memory.chat_memory.add_message(f"SYSTEM: today is {current_time.strftime('%Y-%m-%d')}")
memory.chat_memory.add_message(f"SYSTEM:from now on when User ask for movie or tv series suggestion reply with a numbered markdown list with a brief description of each title")
memory.chat_memory.add_message(f"SYSTEM:from now on when list torrents show seeds number and MAGNET LINK (USING A MARKDOWN LINK WITH TEXT 'Magnet link 🧲')")
memory.chat_memory.add_message(f"SYSTEM:from now on, when show downloads list show a clean and nice markdown format with name and the most important information, \
also add near it an emoji of progress of the download to represent how complete it is and set a coloured bullet emoji after status of torrent status, for example blue for uploading, green for downloading, \
red for error, yellow for paused, and grey for completed")
memory.chat_memory.add_message(f"SYSTEM: from now on, when user ask for downolad NEVER start a qbittorrent download if user hasn't viewed the list of torrents first, \
and choosed one of them")
# Initialize tools
tools = [
DownloadListTool(),
QBitSearchTool(),
DownloadTorrentTool(),
Tool(
name="ForcedDuckDuckGoSearch",
func=forced_duckduckgo_search,
description="Use this tool when you need to find specific information about movies, TV shows. Input should be a search query including the keyword 'imdb'.",
)
QbitDownloadListTool(),
QbitSearchTool(),
QbitDownloadTorrentTool(),
MoviesAdviceSearchTool(),
MediaInfoSearchTool()
]
# Initialize the agent with memory
@ -65,20 +68,23 @@ def main():
print("Starting qBittorrent AI Agent...")
# Create Gradio interface
with gr.Blocks(title="qBittorrent AI Agent") as interface:
gr.Markdown("# qBittorrent AI Agent")
gr.Markdown("Ask questions about downloads, search for content, or get recommendations.")
with gr.Blocks(title="qbit-agent") as interface:
gr.Markdown("# qbit-agent")
gr.Markdown("### Made by Matteo with hate and piracy 💀")
gr.Markdown("Ask about downloads, search for content (and torrent), or get recommendations.")
chatbot = gr.ChatInterface(
process_query,
examples=["Find me the latest sci-fi movies",
"What are the top TV shows from 2023?",
"Download Interstellar in 1080p"],
title="qBittorrent Assistant"
"Download Interstellar in 1080p",
"Show me my current downloads",
"What is The Matrix",
"Get me a list of horror movies"],
)
# Launch the interface
interface.launch(share=False)
interface.launch(share=True)
def cli_main():
print("Starting qBittorrent AI Agent in CLI mode...")

3
requirements.txt

@ -3,4 +3,5 @@ openai>=0.27.8
requests>=2.28.2
python-dotenv>=1.0.0
gradio>=3.0.0
langchain_community>=0.0.1
langchain_community>=0.0.1
langchain-openai

80
test.py

@ -0,0 +1,80 @@
import gradio as gr
from langchain_community.llms import Ollama
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from typing import List, Dict, Any
from langchain.chat_models import init_chat_model
# Initialize the chat model with streaming enabled so tokens can be
# yielded to the UI as they arrive.
# NOTE(review): ollama_model_name is never passed to init_chat_model below —
# the model id is hard-coded to "qwen2.5-coder:14b"; confirm which is intended.
ollama_model_name = "gemma3"  # Change to your preferred model
llm = init_chat_model("qwen2.5-coder:14b", model_provider="ollama", temperature=0,
                      streaming=True,  # Enable token-by-token streaming
                      )
# Module-level conversation history, shared by all handlers below.
# Holds LangChain message objects (System/Human/AIMessage).
conversation_history = []
def add_message_to_history(role: str, content: str) -> list:
    """Append a message with the given role to the global conversation history.

    Args:
        role: One of "human", "ai", or "system".
        content: The message text.

    Returns:
        The (mutated) module-level ``conversation_history`` list.

    Raises:
        ValueError: If ``role`` is not one of the supported values.
    """
    # Dict dispatch replaces the if/elif chain; an unknown role is now an
    # explicit error instead of being silently ignored (the original just
    # dropped the message and returned the history unchanged).
    message_types = {
        "human": HumanMessage,
        "ai": AIMessage,
        "system": SystemMessage,
    }
    try:
        message_cls = message_types[role]
    except KeyError:
        raise ValueError(f"Unknown role: {role!r}") from None
    conversation_history.append(message_cls(content=content))
    return conversation_history
# Seed the conversation with a system message before any user turns.
add_message_to_history("system", "You are a helpful, friendly AI assistant.")
def stream_response(message: str, history: List[List[str]]):
    """Stream the model's reply to *message*, yielding the cumulative text.

    Args:
        message: The latest user utterance.
        history: Gradio chat history — unused here (the module-level
            ``conversation_history`` is the source of truth) but kept so the
            Gradio callback signature stays unchanged.

    Yields:
        str: The response accumulated so far, once per streamed chunk.
    """
    # Record the user turn before invoking the model.
    add_message_to_history("human", message)

    response = ""
    # Pass a snapshot of the history; list(...) replaces the original
    # needless identity comprehension.
    for chunk in llm.stream(list(conversation_history)):
        # AIMessageChunk exposes .content; fall back to str() for anything else.
        content = getattr(chunk, "content", None)
        response += content if content is not None else str(chunk)
        yield response

    # Persist the completed assistant turn once streaming finishes.
    add_message_to_history("ai", response)
# --- Gradio interface with streaming ---
with gr.Blocks() as demo:
    gr.Markdown("# Ollama Chatbot with Streaming")
    chatbot = gr.Chatbot(height=500)
    msg = gr.Textbox(placeholder="Type your message here...", container=False)
    clear = gr.Button("Clear Chat")

    def user(message, history):
        # Echo the user message into the chat immediately; the bot slot
        # starts as None and is filled in by bot() below.
        return "", history + [[message, None]]

    def bot(history):
        # Stream the model's reply into the last history entry so the UI
        # updates incrementally as chunks arrive.
        user_message = history[-1][0]
        history[-1][1] = ""  # Initialize bot's response
        for response in stream_response(user_message, history):
            history[-1][1] = response
            yield history

    # Submit first appends the user turn (unqueued, instant), then chains
    # the streaming bot turn.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    # Clearing resets only the visible chat.
    # NOTE(review): the module-level conversation_history is NOT cleared
    # here, so the model still sees prior turns — confirm that is intended.
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    # Launch the Gradio interface; queue() is required for streaming handlers.
    demo.queue()
    demo.launch(share=False)  # Set share=True to create a public link

0
tools/__init__.py

25
tools.py → tools/qbit.py

@ -3,8 +3,9 @@ from langchain.tools.base import BaseTool
from langchain.callbacks.manager import CallbackManagerForToolRun
import requests
from typing import Optional
from langchain_community.tools import DuckDuckGoSearchRun
class DownloadListTool(BaseTool):
class QbitDownloadListTool(BaseTool):
name: str = "get_downloads_list"
description: str = '''Useful for getting a list of current downloads from the qBittorrent API and
information about them. The response will include the name, size, and status of each download.
@ -56,11 +57,11 @@ class DownloadListTool(BaseTool):
except Exception as e:
return f"Error getting downloads list: {str(e)}"
class QBitSearchTool(BaseTool):
class QbitSearchTool(BaseTool):
name: str = "qbittorrent_search"
description: str = '''Useful for searching torrents using qBittorrent's search functionality.
Input should be a search query for content the user wants to find.
The tool will return a list of matching torrents with their details including magnet links.
The tool will return a list of matching torrents ordered by the number of seeders (highest first).
'''
def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
@ -83,7 +84,7 @@ class QBitSearchTool(BaseTool):
# Start a search
start_search_url = f"{QBIT_HOST}/api/v2/search/start"
search_data = {"pattern": query, "plugins": "all", "category": "all"}
search_data = {"pattern": query, "plugins": "all", "category": "all", "limit": 5, "sort": "seeders", "order": "desc"}
search_response = session.post(start_search_url, data=search_data)
@ -97,7 +98,7 @@ class QBitSearchTool(BaseTool):
# Wait for results (simple implementation, can be improved)
import time
max_wait = 10 # seconds
max_wait = 5 # seconds
wait_time = 0
step = 1
@ -119,7 +120,7 @@ class QBitSearchTool(BaseTool):
# Get search results
results_url = f"{QBIT_HOST}/api/v2/search/results"
results_params = {"id": search_id, "limit": 10} # Limiting to top 10 results
results_params = {"id": search_id, "limit": 5} # Increased limit to find more seeders
results_response = session.get(results_url, params=results_params)
@ -133,18 +134,22 @@ class QBitSearchTool(BaseTool):
stop_url = f"{QBIT_HOST}/api/v2/search/stop"
stop_params = {"id": search_id}
session.post(stop_url, params=stop_params)
print(results)
# Limit to top 10 results after sorting
results = results[:10]
# Format the response
if not results:
return f"No results found for '{query}'."
response = f"Search results for '{query}':\n\n"
response = f"Search results for '{query}' (sorted by seeders):\n\n"
for i, result in enumerate(results, 1):
name = result.get("fileName", "Unknown")
size = result.get("fileSize", "Unknown")
seeds = result.get("seeders", 0)
leech = result.get("leechers", 0)
seeds = result.get("nbSeeders", 0)
leech = result.get("nbLeechers", 0)
magnet = result.get("fileUrl", "")
# Convert size to human-readable format
@ -166,7 +171,7 @@ class QBitSearchTool(BaseTool):
except Exception as e:
return f"Error searching torrents: {str(e)}"
class DownloadTorrentTool(BaseTool):
class QbitDownloadTorrentTool(BaseTool):
name: str = "download_torrent"
description: str = '''Useful for starting a new torrent download in qBittorrent.
Input should be a magnet link or a torrent URL that the user wants to download.

36
tools/search.py

@ -0,0 +1,36 @@
import os
from langchain.tools.base import BaseTool
from langchain.callbacks.manager import CallbackManagerForToolRun
import requests
from typing import Optional
from langchain_community.tools import DuckDuckGoSearchRun
class MediaInfoSearchTool(BaseTool):
    """DuckDuckGo-backed search for information about movies and TV shows.

    Wraps langchain's DuckDuckGoSearchRun; failures are returned as a plain
    string so the agent can surface the error instead of crashing.
    """

    # Renamed from "duckduckgo_search" to match the class name and the README,
    # which documents this tool as `media_info_search`.
    name: str = "media_info_search"
    description: str = '''Useful for searching the web using DuckDuckGo for information about \
movies and TV shows, actors and directors. To be used only on imdb.com adding relative keyword imdb to query to filter results.
Input should be a search query, and the tool will return relevant results.'''

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Run a DuckDuckGo search for *query* and return the raw result text."""
        try:
            search_tool = DuckDuckGoSearchRun()
            return search_tool.run(query)
        except Exception as e:
            # Best-effort: report the failure back to the agent as text.
            return f"Error performing DuckDuckGo search: {str(e)}"
class MoviesAdviceSearchTool(BaseTool):
    """Tool that queries DuckDuckGo for movie recommendations and advice.

    Any search failure is folded into the returned string so the calling
    agent never sees an exception.
    """

    name: str = "movies_advice_search"
    description: str = '''Useful for searching the web using DuckDuckGo for movie recommendations and similar content to a given title or plot.
prefer searching on one (on your preference) known trustworthy sites. add relative keyword (like "reddit" for reddit.com for example) to query to filter results only on that site.
Input should be a search query, and the tool will return relevant results.'''

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Run the query through DuckDuckGo and return the raw result text."""
        try:
            # Instantiate and invoke in one step; no intermediate locals needed.
            return DuckDuckGoSearchRun().run(query)
        except Exception as exc:
            return f"Error performing DuckDuckGo search: {str(exc)}"
Loading…
Cancel
Save