Devcontainer setup

This commit is contained in:
2025-07-11 18:58:57 -03:00
parent f94ae9c31a
commit 19c25908da
8 changed files with 359 additions and 0 deletions

46
.devcontainer/Dockerfile Normal file
View File

@@ -0,0 +1,46 @@
# syntax=docker/dockerfile:1
FROM ubuntu:22.04

# Build-time only: suppress interactive apt prompts. An ARG (not ENV) so the
# setting does not leak into the running container's environment.
ARG DEBIAN_FRONTEND=noninteractive

# System dependencies for the GUI stack (Xvfb/x11vnc/noVNC) and Python tooling.
# No Ollama here -- it runs in a separate container (see docker-compose.yml).
# NOTE(review): on ubuntu:22.04 the apt "firefox" package is a snap transition
# shim; confirm it actually installs a working browser inside a container.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    firefox \
    fluxbox \
    git \
    novnc \
    python3 \
    python3-dev \
    python3-pip \
    python3-tk \
    sudo \
    websockify \
    wget \
    x11vnc \
    xvfb \
    && rm -rf /var/lib/apt/lists/*

# Non-root dev user with passwordless sudo for interactive work.
RUN useradd -m -s /bin/bash vscode && \
    usermod -aG sudo vscode && \
    echo "vscode ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers

WORKDIR /workspace
RUN chown vscode:vscode /workspace

USER vscode

# Per-user pip upgrade; --no-cache-dir keeps the layer small.
RUN python3 -m pip install --user --no-cache-dir --upgrade pip
ENV PATH="/home/vscode/.local/bin:$PATH"
ENV DISPLAY=:1

# --chmod avoids the USER root / chmod / USER vscode round-trip the previous
# version needed just to mark the script executable.
COPY --chown=vscode:vscode --chmod=755 start-services.sh /home/vscode/start-services.sh

CMD ["/bin/bash", "/home/vscode/start-services.sh"]

View File

@@ -0,0 +1,56 @@
# syntax=docker/dockerfile:1
FROM ubuntu:22.04

# Build-time only: keeps apt non-interactive without polluting runtime env.
ARG DEBIAN_FRONTEND=noninteractive

# GUI stack (Xvfb/x11vnc/noVNC) plus Python; Ollama runs inside this image
# (single-container "simple" variant).
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    firefox \
    fluxbox \
    git \
    novnc \
    python3 \
    python3-pip \
    sudo \
    websockify \
    wget \
    x11vnc \
    xvfb \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama.
# NOTE(review): piping a remote script to sh is unpinned and unverified;
# consider pinning a release tarball and checking its checksum.
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Non-root dev user with passwordless sudo.
RUN useradd -m -s /bin/bash vscode && \
    usermod -aG sudo vscode && \
    echo "vscode ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers

WORKDIR /workspace
RUN chown vscode:vscode /workspace

USER vscode
RUN python3 -m pip install --user --no-cache-dir --upgrade pip
ENV PATH="/home/vscode/.local/bin:$PATH"

# All-in-one startup script (display, VNC, noVNC, Ollama, model pull).
# A BuildKit heredoc COPY is clearer and less error-prone than building the
# file from an echo'd '\n'-escaped string, and --chmod removes the chmod step.
# Ubuntu 22.04 ships python3 only (no bare `python`), hence python3 below.
COPY --chown=vscode:vscode --chmod=755 <<'EOF' /home/vscode/start-simple.sh
#!/bin/bash
echo "🎬 Starting YouTube Video Classifier..."
Xvfb :1 -screen 0 1920x1080x24 &
sleep 2
export DISPLAY=:1
fluxbox &
x11vnc -display :1 -nopw -listen localhost -quiet &
/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 &
echo "🖥️ GUI: http://localhost:6080/vnc.html"
ollama serve &
echo "⏳ Starting Ollama..."
sleep 10
echo "📥 Pulling model..."
ollama pull qwen2.5vl:7b
echo "✅ Ready! Use: python3 test_ollama.py"
exec sleep infinity
EOF

CMD ["/bin/bash"]

65
.devcontainer/README.md Normal file
View File

@@ -0,0 +1,65 @@
# Dev Container Setup 🐳
## ⚠️ Quick Fix for the Error
The dev container failed due to a Docker Compose syntax error. This has been **fixed**!
## 🔄 Try Again
1. **Close VS Code completely**
2. **Reopen the project**: `code .`
3. **Reopen in Container**: `Cmd/Ctrl + Shift + P` → "Dev Containers: Reopen in Container"
The container should now build successfully!
## 📋 Alternative Setup Options
If you still have issues, try these alternatives:
### Option 1: Simple Dev Container
```bash
# Rename the alternative config
cd .devcontainer
mv devcontainer.json devcontainer-compose.json
mv devcontainer-simple.json devcontainer.json
# Then reopen in VS Code
```
### Option 2: Manual Docker Setup
```bash
# Build and run manually
cd .devcontainer
docker build -t youtube-classifier .
docker run -it --rm -p 11434:11434 -p 6080:6080 -v $(pwd)/..:/workspace youtube-classifier
```
### Option 3: Local Installation
Use the main project's `setup.sh` script instead.
## 🔧 What Was Fixed
- **Docker Compose syntax error**: Removed extra colon in volumes section
- **Simplified configuration**: Reduced complexity to improve reliability
- **Better error handling**: More robust startup script
## 📖 Once Running
After the container starts successfully:
1. **Wait for setup** (~5-10 minutes first time)
2. **Access GUI**: http://localhost:6080/vnc.html
3. **Test setup**: `python test_ollama.py`
4. **Run demo**: `python demo_classification.py`
5. **Start classifying**: `python script.py`
## 🛠️ Still Having Issues?
1. **Clean Docker**: `docker system prune -a`
2. **Update VS Code**: Make sure you have the latest Dev Containers extension
3. **Check Docker**: Ensure Docker Desktop is running
4. **Try simple version**: Use `devcontainer-simple.json` instead
---
**The main issue has been fixed - try reopening in container now! 🚀**

View File

@@ -0,0 +1,25 @@
{
"name": "YouTube Video Classifier (Simple)",
"build": {
"dockerfile": "Dockerfile.simple"
},
"workspaceFolder": "/workspace",
"workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached",
"customizations": {
"vscode": {
"extensions": [
"ms-python.python"
],
"settings": {
"python.defaultInterpreterPath": "/usr/bin/python3"
}
}
},
"forwardPorts": [11434, 6080],
"postCreateCommand": "pip install --user -r requirements.txt && echo 'Run: bash /home/vscode/start-simple.sh to start services'",
"remoteUser": "vscode"
}

View File

@@ -0,0 +1,26 @@
{
"name": "YouTube Video Classifier",
"dockerComposeFile": "docker-compose.yml",
"service": "youtube-classifier",
"workspaceFolder": "/workspace",
"shutdownAction": "stopCompose",
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.pylint"
],
"settings": {
"python.defaultInterpreterPath": "/usr/bin/python3",
"terminal.integrated.defaultProfile.linux": "bash"
}
}
},
"forwardPorts": [11434, 6080],
"postCreateCommand": "pip install --user -r requirements.txt && sh /home/vscode/start-services.sh",
"remoteUser": "vscode"
}

View File

@@ -0,0 +1,30 @@
# Two-service dev environment: the classifier dev container plus a dedicated
# Ollama inference server. Referenced by .devcontainer/devcontainer.json via
# "dockerComposeFile" / "service".
services:
  # Main dev container: Python tooling + Xvfb/VNC GUI, built from ./Dockerfile.
  youtube-classifier:
    build:
      context: .
      dockerfile: Dockerfile
    volumes:
      # Mount the repository root at /workspace (:cached eases macOS I/O).
      - ../:/workspace:cached
    ports:
      # noVNC web GUI.
      - "6080:6080"
    environment:
      - DISPLAY=:1
      # Point API clients at the sibling ollama service, not localhost.
      - OLLAMA_HOST=http://ollama:11434
    depends_on:
      - ollama
    stdin_open: true
    tty: true
    # Keep the container alive; the devcontainer attaches and runs
    # start-services.sh through its lifecycle hooks.
    command: sleep infinity

  # Ollama model server, reachable from the dev container as http://ollama:11434.
  ollama:
    image: ollama/ollama:latest
    volumes:
      # Persist downloaded models across container rebuilds.
      - ollama-data:/root/.ollama
    ports:
      - "11434:11434"
    environment:
      # Bind all interfaces so other containers on the compose network can connect.
      - OLLAMA_HOST=0.0.0.0
    restart: unless-stopped

volumes:
  ollama-data:

View File

@@ -0,0 +1,51 @@
#!/bin/bash
# All-in-one container startup: virtual display, VNC/noVNC for browser-based
# GUI access, a local Ollama server, model pull, then keep the container alive.

# Virtual X display for GUI apps.
echo "Starting VNC server..."
Xvfb :1 -screen 0 1920x1080x24 > /dev/null 2>&1 &
sleep 2
export DISPLAY=:1

# Lightweight window manager so GUI apps render sanely.
fluxbox > /dev/null 2>&1 &

# VNC server on the virtual display (localhost only, no password).
x11vnc -display :1 -nopw -listen localhost -xkb -ncache 10 -ncache_cr -quiet > /dev/null 2>&1 &

# noVNC bridges VNC to a browser page on port 6080.
/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 > /dev/null 2>&1 &
echo "🖥️ GUI available at: http://localhost:6080/vnc.html"

# Start Ollama in the background.
echo "🤖 Starting Ollama..."
ollama serve > /dev/null 2>&1 &

# Poll the API until it answers instead of guessing with a fixed sleep
# (startup time varies with hardware); give up after ~60s and proceed.
echo "⏳ Waiting for Ollama to start..."
i=1
while [ "$i" -le 30 ]; do
    if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
        echo "✅ Ollama is ready!"
        break
    fi
    sleep 2
    i=$((i + 1))
done

# Pull the vision model used by the classifier.
echo "📥 Pulling qwen2.5vl:7b model (this may take a while)..."
ollama pull qwen2.5vl:7b

# Smoke-test the setup. Ubuntu 22.04 ships python3 only -- there is no bare
# `python` binary, so invoke python3 explicitly.
echo "🧪 Testing setup..."
cd /workspace
python3 test_ollama.py

echo "✅ Container setup complete!"
echo ""
echo "🎬 YouTube Video Classifier is ready!"
echo "📖 Available commands:"
echo "  python3 test_ollama.py              # Test Ollama setup"
echo "  python3 demo_classification.py      # Run classification demo"
echo "  python3 script.py                   # Run main classifier"
echo "  python3 playlist_manager.py --help  # Manage classifications"
echo ""
echo "🖥️ Access GUI at: http://localhost:6080/vnc.html"
echo "🤖 Ollama API at: http://localhost:11434"

# exec so sleep replaces the shell as the foreground process and the
# container stops promptly on SIGTERM.
exec sleep infinity

60
.devcontainer/start-services.sh Executable file
View File

@@ -0,0 +1,60 @@
#!/bin/bash
# Dev-container startup for the two-container (compose) setup: install Python
# deps, bring up the virtual display + VNC/noVNC, wait for the separate
# `ollama` service, then kick off the model download in the background.
#
# Written POSIX-compatibly (no bash-only {1..N} brace expansion) so it still
# works if a lifecycle hook invokes it via plain `sh` instead of bash.

echo "🎬 YouTube Video Classifier Dev Container"
echo "========================================"

# Install Python dependencies when the workspace provides a requirements file
# (idempotent: pip skips already-satisfied requirements).
if [ -f "/workspace/requirements.txt" ]; then
    echo "📦 Installing Python dependencies..."
    cd /workspace
    pip install --user -r requirements.txt
fi

# Virtual X display for GUI apps.
echo "🖥️ Starting display server..."
Xvfb :1 -screen 0 1920x1080x24 > /dev/null 2>&1 &
sleep 2
export DISPLAY=:1

# Lightweight window manager.
fluxbox > /dev/null 2>&1 &

# VNC server on the virtual display (localhost only, no password).
x11vnc -display :1 -nopw -listen localhost -xkb -ncache 10 -ncache_cr -quiet > /dev/null 2>&1 &

# noVNC bridges VNC to a browser page on port 6080.
/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 > /dev/null 2>&1 &
echo "🖥️ GUI available at: http://localhost:6080/vnc.html"

# Poll the sibling ollama container (compose DNS name "ollama") until its API
# answers; give up after ~60s and proceed so the shell stays usable.
echo "⏳ Waiting for Ollama container to start..."
i=1
while [ "$i" -le 30 ]; do
    if curl -s http://ollama:11434/api/tags > /dev/null 2>&1; then
        echo "✅ Ollama container is ready!"
        break
    fi
    echo "  Attempt $i/30 - waiting for Ollama..."
    sleep 2
    i=$((i + 1))
done

# Fire-and-forget model download via the Ollama REST API; deliberately
# backgrounded so container startup is not blocked by a multi-GB pull.
echo "📥 Pulling qwen2.5vl:7b model (this may take a while)..."
curl -X POST http://ollama:11434/api/pull -d '{"name":"qwen2.5vl:7b"}' > /dev/null 2>&1 &

echo "✅ Setup complete!"
echo ""
echo "🎬 YouTube Video Classifier is ready!"
echo "📖 Available commands:"
echo "  python3 test_ollama.py          # Test Ollama setup"
echo "  python3 demo_classification.py  # Run classification demo"
echo "  python3 script.py               # Run main classifier"
echo ""
echo "🖥️ Access GUI at: http://localhost:6080/vnc.html"
echo "🤖 Ollama API at: http://localhost:11434 (via ollama container)"
echo ""
echo "💡 Note: Model download happens in background"
echo "   Check status with: curl http://ollama:11434/api/tags"

# exec so sleep replaces the shell and the container stops promptly on SIGTERM.
exec sleep infinity