mirror of
https://github.com/FranP-code/classify_saved_videos_yt.git
synced 2025-10-13 00:32:25 +00:00
Docker and scripts
65  Dockerfile  Normal file
@@ -0,0 +1,65 @@
# Use Ubuntu 22.04 as base
FROM ubuntu:22.04

# Avoid prompts from apt
ENV DEBIAN_FRONTEND=noninteractive

# Enable the universe component (jammy matches the 22.04 base; "bionic" would target 18.04)
RUN echo "deb http://archive.ubuntu.com/ubuntu jammy main universe" >> /etc/apt/sources.list
RUN echo "deb http://archive.ubuntu.com/ubuntu jammy-security main universe" >> /etc/apt/sources.list
RUN echo "deb http://archive.ubuntu.com/ubuntu jammy-updates main universe" >> /etc/apt/sources.list

# Install system dependencies
RUN apt-get update && apt-get install -y \
    python3 \
    python3-pip \
    python3-venv \
    python3-dev \
    curl \
    wget \
    git \
    sudo \
    xvfb \
    x11vnc \
    fluxbox \
    novnc \
    websockify \
    xterm \
    # Note: on Ubuntu 22.04 the "firefox" deb is a transitional snap package and
    # may not work inside a container without snapd; a PPA build is a common workaround.
    firefox \
    python3-tk \
    build-essential \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Create user
RUN useradd -m -s /bin/bash user && \
    usermod -aG sudo user && \
    echo "user ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers

# Set up workspace
WORKDIR /workspace
RUN chown user:user /workspace

# Switch to user
USER user

# Install Python packages
COPY requirements.txt .
RUN python3 -m pip install --user -r requirements.txt

# Set environment
ENV PATH="/home/user/.local/bin:$PATH"
ENV DISPLAY=:1

# Copy startup script
COPY docker-entrypoint.sh /home/user/
USER root
RUN chmod +x /home/user/docker-entrypoint.sh
USER user

ENTRYPOINT ["/home/user/docker-entrypoint.sh"]
# Without a default command, `exec "$@"` in the entrypoint runs nothing and the container exits
CMD ["bash"]
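For reference, this image can also be built and run by hand, outside Compose. A minimal sketch (the ports mirror the files in this commit; the yt-classifier tag itself is illustrative):

# Build the image, then run it with the GUI and Ollama ports published
docker build -t yt-classifier .
docker run -it --rm -p 6080:6080 -p 11434:11434 yt-classifier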
32  docker-compose.yml  Normal file
@@ -0,0 +1,32 @@
version: '3.8'

services:
  youtube-classifier:
    build: .
    volumes:
      - .:/workspace
      - ollama-data:/home/user/.ollama
      - /tmp/.X11-unix:/tmp/.X11-unix:rw
    ports:
      - "11434:11434"
      - "6080:6080"
    environment:
      - DISPLAY=${DISPLAY:-:0}
      - OLLAMA_HOST=0.0.0.0
    privileged: true
    stdin_open: true
    tty: true
    working_dir: /workspace

  ollama-standalone:
    image: ollama/ollama:latest
    volumes:
      - ollama-data:/root/.ollama
    ports:
      - "11435:11434"
    environment:
      - OLLAMA_HOST=0.0.0.0
    restart: unless-stopped

volumes:
  ollama-data:
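A sketch of driving these services with the standard Compose CLI (the service names come from the file above):

# Build and start the main container (GUI on 6080, Ollama API on 11434)
docker compose up --build youtube-classifier

# Optionally run the standalone Ollama service in the background (API on 11435)
docker compose up -d ollama-standalone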
42  docker-entrypoint.sh  Executable file
@@ -0,0 +1,42 @@
#!/bin/bash

# Start display server
echo "🖥️ Starting display server..."
Xvfb :1 -screen 0 1920x1080x24 > /dev/null 2>&1 &
sleep 2
export DISPLAY=:1

# Start window manager
fluxbox > /dev/null 2>&1 &

# Start VNC
x11vnc -display :1 -nopw -listen localhost -xkb -ncache 10 -ncache_cr -quiet > /dev/null 2>&1 &

# Start noVNC
/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 > /dev/null 2>&1 &

echo "🖥️ GUI available at: http://localhost:6080/vnc.html"

# Start Ollama
echo "🤖 Starting Ollama..."
ollama serve > /dev/null 2>&1 &

# Wait and pull model
echo "⏳ Waiting for Ollama..."
sleep 15
echo "📥 Pulling qwen2.5vl:7b model..."
ollama pull qwen2.5vl:7b

echo "✅ Setup complete!"
echo ""
echo "🎬 YouTube Video Classifier Ready!"
echo "🖥️ GUI: http://localhost:6080/vnc.html"
echo "🤖 API: http://localhost:11434"
echo ""
echo "📖 Commands:"
echo "  python test_ollama.py"
echo "  python demo_classification.py"
echo "  python script.py"

# Keep running
exec "$@"
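The fixed sleep 15 can race a slow Ollama start. A hedged alternative, using Ollama's standard /api/tags endpoint, would poll until the server responds before pulling:

# Sketch: wait for Ollama to answer instead of sleeping a fixed 15 seconds
until curl -sf http://localhost:11434/api/tags > /dev/null; do
    sleep 1
done
ollama pull qwen2.5vl:7b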
63  setup.sh  Executable file
@@ -0,0 +1,63 @@
#!/bin/bash

# YouTube Video Classifier Setup Script

echo "🎬 YouTube Video Classifier Setup"
echo "=================================="

# Check if Python 3 is available
if ! command -v python3 &> /dev/null; then
    echo "❌ Python 3 not found. Please install Python 3.11.10"
    exit 1
fi

echo "✅ Python 3 found"

# Create virtual environment
echo "📦 Creating virtual environment..."
python3 -m venv venv

# Activate virtual environment
echo "🔧 Activating virtual environment..."
source venv/bin/activate

# Install requirements
echo "📥 Installing Python dependencies..."
pip install -r requirements.txt

# Check if Ollama is installed
if ! command -v ollama &> /dev/null; then
    echo "❌ Ollama not found. Please install Ollama from https://ollama.ai"
    echo "   After installation, run:"
    echo "   1. ollama serve"
    echo "   2. ollama pull qwen2.5vl:7b"
    exit 1
fi

echo "✅ Ollama found"

# Check if Ollama is running
if ! curl -s http://localhost:11434/api/tags &> /dev/null; then
    echo "⚠️ Ollama is not running. Starting Ollama..."
    ollama serve &
    sleep 5
fi

# Pull Qwen2.5VL model
echo "🤖 Pulling Qwen2.5VL model..."
ollama pull qwen2.5vl:7b

# Test setup
echo "🧪 Testing setup..."
python test_ollama.py

echo "✅ Setup complete!"
echo ""
echo "Next steps:"
echo "1. Make sure your browser is pinned to the taskbar"
echo "2. Update the browser image in img/ folder if needed"
echo "3. Run: python script.py"
echo ""
echo "Optional:"
echo "- Run demo: python demo_classification.py"
echo "- Analyze results: python playlist_manager.py --analyze"
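After setup.sh finishes, a quick sanity check that the model actually landed (this reuses the /api/tags endpoint the script already probes; the grep pattern is a loose match, not an exact tag check):

# Sketch: confirm qwen2.5vl appears in the local model list
curl -s http://localhost:11434/api/tags | grep -q "qwen2.5vl" \
    && echo "✅ model ready" \
    || echo "❌ model missing"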