diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
deleted file mode 100644
index 7d716e7..0000000
--- a/.devcontainer/Dockerfile
+++ /dev/null
@@ -1,46 +0,0 @@
-FROM ubuntu:22.04
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-# Install system dependencies (no Ollama needed - using separate container)
-RUN apt-get update && apt-get install -y \
-    python3 \
-    python3-pip \
-    python3-dev \
-    curl \
-    wget \
-    git \
-    sudo \
-    xvfb \
-    x11vnc \
-    fluxbox \
-    novnc \
-    websockify \
-    firefox \
-    python3-tk \
-    build-essential \
-    && rm -rf /var/lib/apt/lists/*
-
-# Create user vscode
-RUN useradd -m -s /bin/bash vscode && \
-    usermod -aG sudo vscode && \
-    echo "vscode ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
-
-WORKDIR /workspace
-RUN chown vscode:vscode /workspace
-
-USER vscode
-
-# Install pip
-RUN python3 -m pip install --user --upgrade pip
-
-ENV PATH="/home/vscode/.local/bin:$PATH"
-ENV DISPLAY=:1
-
-# Copy startup script and make it executable
-COPY --chown=vscode:vscode start-services.sh /home/vscode/start-services.sh
-USER root
-RUN chmod +x /home/vscode/start-services.sh
-USER vscode
-
-CMD ["/bin/bash", "/home/vscode/start-services.sh"]
diff --git a/.devcontainer/Dockerfile.simple b/.devcontainer/Dockerfile.simple
deleted file mode 100644
index e077ff0..0000000
--- a/.devcontainer/Dockerfile.simple
+++ /dev/null
@@ -1,56 +0,0 @@
-FROM ubuntu:22.04
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-# Install dependencies
-RUN apt-get update && apt-get install -y \
-    python3 \
-    python3-pip \
-    curl \
-    wget \
-    git \
-    sudo \
-    xvfb \
-    x11vnc \
-    fluxbox \
-    novnc \
-    websockify \
-    firefox \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install Ollama
-RUN curl -fsSL https://ollama.ai/install.sh | sh
-
-# Create user
-RUN useradd -m -s /bin/bash vscode && \
-    usermod -aG sudo vscode && \
-    echo "vscode ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers
-
-WORKDIR /workspace
-RUN chown vscode:vscode /workspace
-
-USER vscode
-RUN python3 -m pip install --user --upgrade pip
-
-ENV PATH="/home/vscode/.local/bin:$PATH"
-
-# Simple startup script
-RUN echo '#!/bin/bash\n\
-echo "🎬 Starting YouTube Video Classifier..."\n\
-Xvfb :1 -screen 0 1920x1080x24 &\n\
-sleep 2\n\
-export DISPLAY=:1\n\
-fluxbox &\n\
-x11vnc -display :1 -nopw -listen localhost -quiet &\n\
-/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 &\n\
-echo "🖥️ GUI: http://localhost:6080/vnc.html"\n\
-ollama serve &\n\
-echo "⏳ Starting Ollama..."\n\
-sleep 10\n\
-echo "📥 Pulling model..."\n\
-ollama pull qwen2.5vl:7b\n\
-echo "✅ Ready! Use: python test_ollama.py"\n\
-exec sleep infinity\n' > /home/vscode/start-simple.sh && \
-    chmod +x /home/vscode/start-simple.sh
-
-CMD ["/bin/bash"]
diff --git a/.devcontainer/README.md b/.devcontainer/README.md
deleted file mode 100644
index e1b21b6..0000000
--- a/.devcontainer/README.md
+++ /dev/null
@@ -1,65 +0,0 @@
-# Dev Container Setup 🐳
-
-## ⚠️ Quick Fix for the Error
-
-The dev container failed due to a Docker Compose syntax error. This has been **fixed**!
-
-## Try Again
-
-1. **Close VS Code completely**
-2. **Reopen the project**: `code .`
-3. **Reopen in Container**: `Cmd/Ctrl + Shift + P` → "Dev Containers: Reopen in Container"
-
-The container should now build successfully!
-
-## 📋 Alternative Setup Options
-
-If you still have issues, try these alternatives:
-
-### Option 1: Simple Dev Container
-```bash
-# Rename the alternative config
-cd .devcontainer
-mv devcontainer.json devcontainer-compose.json
-mv devcontainer-simple.json devcontainer.json
-
-# Then reopen in VS Code
-```
-
-### Option 2: Manual Docker Setup
-```bash
-# Build and run manually
-cd .devcontainer
-docker build -t youtube-classifier .
-docker run -it --rm -p 11434:11434 -p 6080:6080 -v $(pwd)/..:/workspace youtube-classifier
-```
-
-### Option 3: Local Installation
-Use the main project's `setup.sh` script instead.
-
-## 🔧 What Was Fixed
-
-- **Docker Compose syntax error**: Removed extra colon in volumes section
-- **Simplified configuration**: Reduced complexity to improve reliability
-- **Better error handling**: More robust startup script
-
-## 📖 Once Running
-
-After the container starts successfully:
-
-1. **Wait for setup** (~5-10 minutes first time)
-2. **Access GUI**: http://localhost:6080/vnc.html
-3. **Test setup**: `python test_ollama.py`
-4. **Run demo**: `python demo_classification.py`
-5. **Start classifying**: `python script.py`
-
-## Still Having Issues?
-
-1. **Clean Docker**: `docker system prune -a`
-2. **Update VS Code**: Make sure you have the latest Dev Containers extension
-3. **Check Docker**: Ensure Docker Desktop is running
-4. **Try simple version**: Use `devcontainer-simple.json` instead
-
----
-
-**The main issue has been fixed - try reopening in container now!**
diff --git a/.devcontainer/devcontainer-simple.json b/.devcontainer/devcontainer-simple.json
deleted file mode 100644
index 3b0cd86..0000000
--- a/.devcontainer/devcontainer-simple.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-    "name": "YouTube Video Classifier (Simple)",
-    "build": {
-        "dockerfile": "Dockerfile.simple"
-    },
-    "workspaceFolder": "/workspace",
-    "workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=cached",
-
-    "customizations": {
-        "vscode": {
-            "extensions": [
-                "ms-python.python"
-            ],
-            "settings": {
-                "python.defaultInterpreterPath": "/usr/bin/python3"
-            }
-        }
-    },
-
-    "forwardPorts": [11434, 6080],
-
-    "postCreateCommand": "pip install --user -r requirements.txt && echo 'Run: bash /home/vscode/start-simple.sh to start services'",
-
-    "remoteUser": "vscode"
-}
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
deleted file mode 100644
index 17ed3a9..0000000
--- a/.devcontainer/devcontainer.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-    "name": "YouTube Video Classifier",
-    "dockerComposeFile": "docker-compose.yml",
-    "service": "youtube-classifier",
-    "workspaceFolder": "/workspace",
-    "shutdownAction": "stopCompose",
-
-    "customizations": {
-        "vscode": {
-            "extensions": [
-                "ms-python.python",
-                "ms-python.pylint"
-            ],
-            "settings": {
-                "python.defaultInterpreterPath": "/usr/bin/python3",
-                "terminal.integrated.defaultProfile.linux": "bash"
-            }
-        }
-    },
-
-    "forwardPorts": [11434, 6080],
-
-    "postCreateCommand": "pip install --user -r requirements.txt && sh /home/vscode/start-services.sh",
-
-    "remoteUser": "vscode"
-}
diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml
deleted file mode 100644
index 5a82b0c..0000000
--- a/.devcontainer/docker-compose.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-services:
-  youtube-classifier:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    volumes:
-      - ../:/workspace:cached
-    ports:
-      - "6080:6080"
-    environment:
-      - DISPLAY=:1
-      - OLLAMA_HOST=http://ollama:11434
-    depends_on:
-      - ollama
-    stdin_open: true
-    tty: true
-    command: sleep infinity
-
-  ollama:
-    image: ollama/ollama:latest
-    volumes:
-      - ollama-data:/root/.ollama
-    ports:
-      - "11434:11434"
-    environment:
-      - OLLAMA_HOST=0.0.0.0
-    restart: unless-stopped
-
-volumes:
-  ollama-data:
diff --git a/.devcontainer/entrypoint.sh b/.devcontainer/entrypoint.sh
deleted file mode 100644
index 5053cce..0000000
--- a/.devcontainer/entrypoint.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Start VNC server for GUI access
-echo "Starting VNC server..."
-Xvfb :1 -screen 0 1920x1080x24 > /dev/null 2>&1 &
-sleep 2
-
-export DISPLAY=:1
-
-# Start window manager
-fluxbox > /dev/null 2>&1 &
-
-# Start VNC server
-x11vnc -display :1 -nopw -listen localhost -xkb -ncache 10 -ncache_cr -quiet > /dev/null 2>&1 &
-
-# Start noVNC
-/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 > /dev/null 2>&1 &
-
-echo "🖥️ GUI available at: http://localhost:6080/vnc.html"
-
-# Start Ollama
-echo "🤖 Starting Ollama..."
-ollama serve > /dev/null 2>&1 &
-
-# Wait for Ollama to be ready
-echo "⏳ Waiting for Ollama to start..."
-sleep 15
-
-# Pull the required model
-echo "📥 Pulling qwen2.5vl:7b model (this may take a while)..."
-ollama pull qwen2.5vl:7b
-
-# Test setup
-echo "🧪 Testing setup..."
-cd /workspace
-python test_ollama.py
-
-echo "✅ Container setup complete!"
-echo ""
-echo "🎬 YouTube Video Classifier is ready!"
-echo "📖 Available commands:"
-echo "  python test_ollama.py              # Test Ollama setup"
-echo "  python demo_classification.py      # Run classification demo"
-echo "  python script.py                   # Run main classifier"
-echo "  python playlist_manager.py --help  # Manage classifications"
-echo ""
-echo "🖥️ Access GUI at: http://localhost:6080/vnc.html"
-echo "🤖 Ollama API at: http://localhost:11434"
-
-# Keep container running
-sleep infinity
diff --git a/.devcontainer/start-services.sh b/.devcontainer/start-services.sh
deleted file mode 100755
index 66b0a56..0000000
--- a/.devcontainer/start-services.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-
-echo "🎬 YouTube Video Classifier Dev Container"
-echo "========================================"
-
-# Install Python dependencies if not already installed
-if [ -f "/workspace/requirements.txt" ]; then
-    echo "📦 Installing Python dependencies..."
-    cd /workspace
-    pip install --user -r requirements.txt
-fi
-
-# Start display server
-echo "🖥️ Starting display server..."
-Xvfb :1 -screen 0 1920x1080x24 > /dev/null 2>&1 &
-sleep 2
-export DISPLAY=:1
-
-# Start window manager
-fluxbox > /dev/null 2>&1 &
-
-# Start VNC
-x11vnc -display :1 -nopw -listen localhost -xkb -ncache 10 -ncache_cr -quiet > /dev/null 2>&1 &
-
-# Start noVNC
-/usr/share/novnc/utils/launch.sh --vnc localhost:5900 --listen 6080 > /dev/null 2>&1 &
-
-echo "🖥️ GUI available at: http://localhost:6080/vnc.html"
-
-# Wait for Ollama container to be ready
-echo "⏳ Waiting for Ollama container to start..."
-for i in {1..30}; do
-    if curl -s http://ollama:11434/api/tags > /dev/null 2>&1; then
-        echo "✅ Ollama container is ready!"
-        break
-    fi
-    echo "  Attempt $i/30 - waiting for Ollama..."
-    sleep 2
-done
-
-# Pull the required model
-echo "📥 Pulling qwen2.5vl:7b model (this may take a while)..."
-curl -X POST http://ollama:11434/api/pull -d '{"name":"qwen2.5vl:7b"}' > /dev/null 2>&1 &
-
-echo "✅ Setup complete!"
-echo ""
-echo "🎬 YouTube Video Classifier is ready!"
-echo "📖 Available commands:"
-echo "  python test_ollama.py          # Test Ollama setup"
-echo "  python demo_classification.py  # Run classification demo"
-echo "  python script.py               # Run main classifier"
-echo ""
-echo "🖥️ Access GUI at: http://localhost:6080/vnc.html"
-echo "🤖 Ollama API at: http://localhost:11434 (via ollama container)"
-echo ""
-echo "💡 Note: Model download happens in background"
-echo "   Check status with: curl http://ollama:11434/api/tags"
-
-# Keep container running
-exec sleep infinity
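With the dev container configuration removed, anyone who still wants the Ollama backend these files provided has to start it by hand. The sketch below is a minimal reconstruction based only on the deleted `docker-compose.yml` and `start-services.sh`: the same `ollama/ollama:latest` image, the same published port (11434), the same model (`qwen2.5vl:7b`), and the same `/api/tags` and `/api/pull` calls the startup script used. The container name, the `ollama-data` volume name, and the use of `localhost` in place of the compose-internal `ollama` hostname are assumptions for a standalone run, not part of the original setup.

```bash
#!/usr/bin/env bash
set -euo pipefail

# Run the same Ollama image the deleted docker-compose.yml used, publishing
# its API port and persisting models in a named volume (container and volume
# names here are illustrative; adjust as needed).
docker run -d --name ollama \
    -p 11434:11434 \
    -v ollama-data:/root/.ollama \
    ollama/ollama:latest

# Wait for the API to answer, mirroring the readiness loop that
# start-services.sh ran against the compose-internal "ollama" host.
for i in {1..30}; do
    if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
        echo "Ollama is ready"
        break
    fi
    echo "  Attempt $i/30 - waiting for Ollama..."
    sleep 2
done

# Pull the model the scripts relied on, then list the installed models.
curl -s -X POST http://localhost:11434/api/pull -d '{"name":"qwen2.5vl:7b"}'
curl -s http://localhost:11434/api/tags
```

Once `/api/tags` reports the model, `python test_ollama.py` and the other scripts referenced above should be usable against this endpoint, assuming they honor the `OLLAMA_HOST` value the compose file previously exported (or default to localhost).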