# lama / Dockerfile
# ngandugilbert's picture — Update Dockerfile
# 5f73a13 verified
# (page-header residue commented out so the Dockerfile parses)
# Pin the base image to a specific release — :latest silently changes between
# builds and breaks reproducibility (hadolint DL3007).
# TODO(review): bump this tag deliberately; pin by digest for full reproducibility.
FROM ollama/ollama:0.3.12
# Install the tools needed to fetch ngrok and probe the Ollama API.
# --no-install-recommends keeps the layer minimal (DL3015); the apt list
# cleanup happens in the same layer so the cache never lands in the image.
# ca-certificates is included so the HTTPS download below can verify TLS.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    curl \
    wget \
    && rm -rf /var/lib/apt/lists/*
# Fetch the ngrok v3 stable build and unpack the binary onto PATH in one
# layer, removing the archive so it never persists in the image.
# NOTE(review): the download is not checksum-verified — consider
# ADD --checksum=sha256:... or ngrok's apt repository; confirm policy.
RUN curl -fsSL -o /tmp/ngrok.tgz \
        https://bin.equinox.io/c/bNyj1mQVY4c/ngrok-v3-stable-linux-amd64.tgz \
    && tar -xzf /tmp/ngrok.tgz -C /usr/local/bin \
    && rm /tmp/ngrok.tgz
# WORKDIR creates /app if it is missing and sets the working directory for the
# remaining instructions — the idiomatic replacement for RUN mkdir -p (DL3000).
WORKDIR /app
# Generate the startup script. printf '%s\n' emits one argument per line,
# avoiding echo's shell-dependent escape handling. Single quotes keep the
# script text literal at build time; variables expand when the container runs.
#
# Fixes vs. the previous version:
#  * `ngrok authtoken <your-ngrok-authtoken>` was a runtime shell syntax
#    error (< and > parse as redirections) AND a credential placeholder baked
#    into an image layer. The token is now read from the NGROK_AUTHTOKEN
#    environment variable at container start (docker run -e NGROK_AUTHTOKEN=...).
#  * Uses the current `ngrok config add-authtoken` command; `ngrok authtoken`
#    is the deprecated v2 spelling.
RUN printf '%s\n' \
    '#!/bin/bash' \
    'set -eu' \
    '# Start the Ollama server in the background' \
    'ollama serve &' \
    '# Poll the API until it answers, for at most 60 seconds' \
    'echo "Waiting for Ollama to start..."' \
    'if ! timeout 60 bash -c "until curl -s http://localhost:11434/api/tags > /dev/null 2>&1; do sleep 1; done"; then' \
    '    echo "Error: Ollama failed to start within 60 seconds" >&2' \
    '    exit 1' \
    'fi' \
    '# Pull the TinyLlama model so it is ready to serve' \
    'echo "Pulling TinyLlama model..."' \
    'ollama pull tinyllama' \
    '# The ngrok token must be supplied at runtime; never bake it into the image' \
    'if [ -z "${NGROK_AUTHTOKEN:-}" ]; then' \
    '    echo "Error: NGROK_AUTHTOKEN is not set" >&2' \
    '    exit 1' \
    'fi' \
    'echo "Starting Ngrok tunnel..."' \
    'ngrok config add-authtoken "$NGROK_AUTHTOKEN"' \
    'ngrok http 11434 --host-header="localhost:11434" &' \
    '# Block on the background jobs to keep the container alive' \
    'wait' \
    > /app/start_services.sh
# Make the generated startup script executable
RUN chmod +x /app/start_services.sh
# Document the Ollama API port (EXPOSE does not publish it; use -p at run time).
# NOTE(review): ngrok's local inspector on 4040 is intentionally not exposed.
EXPOSE 11434
# Exec-form CMD: the script runs as PID 1 and receives SIGTERM on `docker stop`.
# NOTE(review): the container runs as the base image's default user (root for
# ollama/ollama) — confirm whether a non-root USER is feasible here.
CMD ["/app/start_services.sh"]