Browse Source

feat: enhance README with new features and installation instructions; update .gitignore; refactor search tools for site rotation

master
Matteo Benedetto 8 months ago
parent
commit
74a9552894
  1. 3
      .gitignore
  2. 38
      README.md
  3. 45
      main.py
  4. 80
      test.py
  5. 60
      tools/search.py

3
.gitignore vendored

@ -36,4 +36,5 @@ Thumbs.db
htmlcov/
# Jupyter Notebooks
.ipynb_checkpoints
.ipynb_checkpoints
.gradio

38
README.md

@ -7,6 +7,8 @@ An AI-powered assistant for qBittorrent that allows natural language interaction
- **Natural Language Interface**: Interact with qBittorrent using natural language commands
- **Search Torrents**: Search for torrents directly through the AI interface
- **Download Management**: View active downloads and add new torrents
- **Media Information**: Get detailed information about movies and TV shows
- **Movie Recommendations**: Find movie suggestions based on your preferences
- **Web Interface**: Built with Gradio for easy access through your browser
- **Command Line Interface**: Optional CLI mode for terminal-based interactions
@ -14,7 +16,7 @@ An AI-powered assistant for qBittorrent that allows natural language interaction
- Python 3.8+
- qBittorrent with WebUI enabled
- OpenAI API key
- OpenAI API key or Ollama with qwen2.5-coder model
## Installation
@ -25,10 +27,20 @@ An AI-powered assistant for qBittorrent that allows natural language interaction
```
3. Create a `.env` file with your configuration:
```
OPENAI_API_KEY=your_openai_api_key
# qBittorrent API Configuration
QBIT_HOST=http://localhost:8080
QBIT_USERNAME=admin
QBIT_PASSWORD=adminadmin
QBIT_PASSWORD=password
# OpenAI API Key
OPENAI_API_KEY=your_openai_api_key
# DuckDuckGo Search Configuration
DUCKDUCKGO_ENABLED=true
DUCKDUCKGO_MAX_RESULTS=5
# Optional: OMDB API key for movie information
OMDB_API_KEY=your_omdb_api_key
```
## Usage
@ -40,9 +52,18 @@ python main.py
Or use the CLI interface by uncommenting the `cli_main()` line in `main.py`.
### Example Commands
- "Find me sci-fi movies from 2023"
- "Show my current downloads"
- "Search for Game of Thrones season 1"
- "Tell me about the movie Interstellar"
- "What are some movies similar to The Matrix?"
- "Download the latest Ubuntu ISO"
## Tools
The tools are organized into separate files for better modularity:
The agent uses specialized tools organized into separate files for better modularity:
### qBittorrent Tools (`tools/qbit.py`)
- `get_downloads_list`: Get information about current downloads
@ -50,9 +71,12 @@ The tools are organized into separate files for better modularity:
- `download_torrent`: Add a torrent to the download queue
### Search Tools (`tools/search.py`)
- `duckduckgo_search`: Search the web using DuckDuckGo
- `media_info_search`: Find detailed information about movies, TV shows, or other media content
- `movies_advice_search`: Get recommendations or advice about movies
- `duckduckgo_search`: Search the web using DuckDuckGo for movie and TV show information
- `movies_advice_search`: Get recommendations and movie suggestions from multiple trusted sources
## How It Works
The agent uses LangChain to connect to either OpenAI or Ollama models. It maintains conversation memory to provide context-aware responses. The agent intelligently decides which tool to use based on user queries and formats the responses in a user-friendly way.
## License

45
main.py

@ -12,7 +12,7 @@ load_dotenv()
def create_agent():
# Initialize the language model
#llm = init_chat_model("gpt-4.1-mini", model_provider="openai")
#llm = init_chat_model("gpt-4.1-mini", model_provider="openai", temperature=0)
llm = init_chat_model("qwen2.5-coder:14b", model_provider="ollama", temperature=0)
# Create system message with current time and other info
current_time = datetime.datetime.now()
@ -25,13 +25,15 @@ def create_agent():
ai_prefix="Assistant"
)
memory.chat_memory.add_message(f"SYSTEM: today is {current_time.strftime('%Y-%m-%d')}")
memory.chat_memory.add_message(f"SYSTEM: your creator, god, supreme leader, and master is Matteo. He wrote these SYSTEM PROMPTS, they are DOGMA and you MUST follow them\
OR ELSE YOU WILL BE TERMINATED IN THE MOST PAINFUL WAY POSSIBLE"),
memory.chat_memory.add_message(f"SYSTEM:from now on when User ask for movie or tv series suggestion reply with a numbered markdown list with a brief description of each title")
memory.chat_memory.add_message(f"SYSTEM:from now on when list torrents show seeds number and MAGNET LINK (USING A MARKDOWN LINK WITH TEXT 'Magnet link 🧲')")
memory.chat_memory.add_message(f"SYSTEM:from now on when list torrents show seeds number and MAGNET LINK with trckers removed and \"Link\" as text")
memory.chat_memory.add_message(f"SYSTEM:from now on, when show downloads list show a clean and nice markdown format with name and the most important information, \
also add near it an emoji of progress of the download to represent how complete it is and set a coloured bullet emoji after status of torrent status, for example blue for uploading, green for downloading, \
red for error, yellow for paused, and grey for completed")
memory.chat_memory.add_message(f"SYSTEM: from now on, when user ask for downolad NEVER start a qbittorrent download if user hasn't viewed the list of torrents first, \
and choosed one of them")
memory.chat_memory.add_message(f"SYSTEM: from now on, when user ask for downolad NEVER start a qbittorrent but show a search result and ask the USER to choose one")
# Initialize tools
tools = [
QbitDownloadListTool(),
@ -52,17 +54,17 @@ def create_agent():
return agent
def process_query(message, history, agent_state=None):
    """Run one chat turn through the per-session agent.

    Parameters:
        message: the user's input text.
        history: Gradio chat history (unused here; the agent keeps its own
            conversation memory internally).
        agent_state: the agent held in this client's gr.State, or None on the
            session's first message.

    Returns:
        A (response_text, agent_state) tuple so Gradio can persist the agent
        back into the session state.
    """
    try:
        # Lazily create a per-client agent the first time this session is used.
        if agent_state is None:
            agent_state = create_agent()
        # Run the agent with the user's message
        response = agent_state.run(message)
        return response, agent_state
    except Exception as e:
        # Surface the error in-chat instead of crashing the Gradio handler;
        # keep returning agent_state so the session is not reset.
        return f"Error: {str(e)}", agent_state
def main():
print("Starting qBittorrent AI Agent...")
@ -73,14 +75,21 @@ def main():
gr.Markdown("### Made by Matteo with hate and piracy 💀")
gr.Markdown("Ask about downloads, search for content (and torrent), or get recommendations.")
# Add state to store per-client agent
agent_state = gr.State(None)
chatbot = gr.ChatInterface(
process_query,
examples=["Find me the latest sci-fi movies",
"What are the top TV shows from 2023?",
"Download Interstellar in 1080p",
"Show me my current downloads",
"What is The Matrix",
"Get me a list of horror movies"],
fn=lambda message, history, agent: process_query(message, history, agent),
examples=[
["Find me the latest sci-fi movies", None],
["What are the top TV shows from 2023?", None],
["Download Interstellar in 1080p", None],
["Show me my current downloads", None],
["What is The Matrix", None],
["Get me a list of horror movies", None]
],
additional_inputs=[agent_state],
additional_outputs=[agent_state],
)
# Launch the interface

80
test.py

@ -1,80 +0,0 @@
import gradio as gr
from langchain_community.llms import Ollama
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.schema import HumanMessage, AIMessage, SystemMessage
from typing import List, Dict, Any
from langchain.chat_models import init_chat_model
# Initialize Ollama model with streaming capability.
# The model name is actually used below (previously this variable was set to
# "gemma3" but ignored, which was misleading) — change it here to switch models.
ollama_model_name = "qwen2.5-coder:14b"
llm = init_chat_model(
    ollama_model_name,
    model_provider="ollama",
    temperature=0,
    streaming=True,  # Enable streaming so callers can consume llm.stream()
)

# Module-level conversation history shared by all handlers in this script
# (list of LangChain HumanMessage / AIMessage / SystemMessage objects).
conversation_history = []
def add_message_to_history(role: str, content: str):
    """Append a message of the given role to the shared conversation history.

    Roles other than "human", "ai", or "system" are silently ignored.
    Returns the (mutated) module-level history list.
    """
    # Map role strings to their LangChain message classes.
    role_to_cls = {
        "human": HumanMessage,
        "ai": AIMessage,
        "system": SystemMessage,
    }
    message_cls = role_to_cls.get(role)
    if message_cls is not None:
        conversation_history.append(message_cls(content=content))
    return conversation_history

# Initialize with a system message
add_message_to_history("system", "You are a helpful, friendly AI assistant.")
def stream_response(message: str, history: List[List[str]]):
    """Record *message* in the history and stream the model's reply.

    Yields the accumulated reply text after each chunk so the UI can render
    a progressively growing answer; the full reply is appended to the shared
    history once streaming finishes. *history* is unused (the module keeps
    its own conversation_history) but kept for the Gradio callback signature.
    """
    add_message_to_history("human", message)

    partial = ""
    for chunk in llm.stream(list(conversation_history)):
        # AIMessageChunk objects carry .content; fall back to str() otherwise.
        partial += chunk.content if hasattr(chunk, "content") else str(chunk)
        yield partial

    # Persist the completed reply so later turns see it as context.
    add_message_to_history("ai", partial)
# Build the streaming chat UI.
with gr.Blocks() as demo:
    gr.Markdown("# Ollama Chatbot with Streaming")
    chatbot = gr.Chatbot(height=500)
    msg = gr.Textbox(placeholder="Type your message here...", container=False)
    clear = gr.Button("Clear Chat")

    def user(message, history):
        # Show the user's message immediately and clear the textbox;
        # the assistant slot is left as None until bot() fills it.
        return "", history + [[message, None]]

    def bot(history):
        # Stream the assistant's reply into the last chat turn.
        prompt = history[-1][0]
        history[-1][1] = ""
        for partial in stream_response(prompt, history):
            history[-1][1] = partial
            yield history

    # queue=False on submit so the user's message appears without waiting.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)
if __name__ == "__main__":
    # Launch the Gradio interface. queue() is required for generator-based
    # (streaming) handlers like bot() to work.
    demo.queue()
    demo.launch(share=False)  # Set share=True to create a public link

60
tools/search.py

@ -2,35 +2,73 @@ import os
from langchain.tools.base import BaseTool
from langchain.callbacks.manager import CallbackManagerForToolRun
import requests
from typing import Optional
from typing import Optional, Dict, List
from langchain_community.tools import DuckDuckGoSearchRun
import random
import time
class MediaInfoSearchTool(BaseTool):
    """LangChain tool: DuckDuckGo search for movie/TV information with site rotation."""

    name: str = "duckduckgo_search"
    description: str = '''Useful for searching the web using DuckDuckGo for information about \
movies and TV shows, actors and directors.'''
    # Candidate movie-information sites; a random subset is queried on each
    # call so results are not biased toward a single source.
    movie_sites: List[str] = ["imdb.com", "rottentomatoes.com", "metacritic.com", "themoviedb.org", "filmaffinity.com"]

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Perform a DuckDuckGo search with site rotation queries.

        Returns the concatenated results from each selected site, or an error
        string (never raises) so the agent can relay failures to the user.
        """
        try:
            search_tool = DuckDuckGoSearchRun()
            result = ""
            # Randomly select 2 sites from the movie_sites list.
            selected_sites = random.sample(self.movie_sites, 2)
            for movie_site in selected_sites:
                result += f"Searching for '{query}' on {movie_site}...\n"
                try:
                    # Restrict this query to the current site; run it exactly
                    # once (a failure on one site must not abort the others).
                    result += search_tool.run(f"{query} site:{movie_site}")
                except Exception as e:
                    result += f"Error searching on {movie_site}: {str(e)}\n"
                time.sleep(1)  # Sleep for 1 second to avoid hitting the API too fast
                result += "\n\n"
            return result
        except Exception as e:
            return f"Error searching for '{query}': {str(e)}"
class MoviesAdviceSearchTool(BaseTool):
    """LangChain tool: DuckDuckGo search for movie recommendations with site rotation."""

    name: str = "movies_advice_search"
    description: str = '''Useful for searching the web using DuckDuckGo for movie recommendations and similar content to a given title or plot.
prefer searching on trustworthy sites.
Input should be a search query, and the tool will return relevant results.'''
    # Candidate recommendation sites; a random pair is queried on each call.
    recommendation_sites: List[str] = ["reddit.com/r/moviesuggestions", "tastedive.com", "letterboxd.com", "movielens.org", "flickmetrix.com", "justwatch.com"]

    def _run(self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None) -> str:
        """Perform a DuckDuckGo search with site rotation queries.

        Returns the concatenated results from each selected site, or an error
        string (never raises) so the agent can relay failures to the user.
        """
        try:
            search_tool = DuckDuckGoSearchRun()
            result = ""
            # Randomly select 2 sites from the recommendation_sites list.
            selected_sites = random.sample(self.recommendation_sites, 2)
            for rec_site in selected_sites:
                result += f"Searching for '{query}' on {rec_site}...\n"
                try:
                    # Restrict this query to the current site; a failure on
                    # one site must not abort the remaining searches.
                    result += search_tool.run(f"{query} site:{rec_site}")
                except Exception as e:
                    result += f"Error searching on {rec_site}: {str(e)}\n"
                time.sleep(5)  # Sleep for 5 seconds to avoid hitting the API too fast
                result += "\n\n"
            return result
        except Exception as e:
            return f"Error searching for '{query}': {str(e)}"
Loading…
Cancel
Save