#!/bin/bash
# Container entrypoint: start the Ollama server in the background, wait until
# it accepts TCP connections, pre-pull the embedding models so the first API
# request does not pay the download cost, then block on the server process so
# the container stays alive.
#
# Env:
#   OLLAMA_HOST  - optional "host:port" Ollama binds to (e.g. "0.0.0.0:7860").
#                  Its port component is used for the readiness probe.
set -euo pipefail

# Port to probe: taken from OLLAMA_HOST when it ends in a numeric port,
# otherwise 7860 (the original hard-coded value — presumably the Hugging Face
# Spaces port this container maps Ollama onto; Ollama's stock default is
# 11434, so confirm OLLAMA_HOST is set accordingly in the image).
probe_port="${OLLAMA_HOST:-}"
probe_port="${probe_port##*:}"
[[ "$probe_port" =~ ^[0-9]+$ ]] || probe_port=7860

# Start the Ollama server in the background and remember its PID so we can
# propagate its exit status at the end.
ollama serve &
server_pid=$!

# Wait for the server to be ready, bounded so a misconfigured port cannot
# hang the container forever (120 attempts, ~2 minutes).
ready=0
for (( attempt = 0; attempt < 120; attempt++ )); do
  if nc -z localhost "$probe_port"; then
    ready=1
    break
  fi
  echo "Waiting for Ollama server to start..."
  sleep 1
done
if (( ! ready )); then
  echo "Ollama server did not become ready on port $probe_port" >&2
  exit 1
fi

# Pre-pull the embedding models.
echo "Pulling the model..."
ollama pull nomic-embed-text
ollama pull all-minilm

# Keep the container running; exit with the server's status if it dies.
wait "$server_pid"

# Example request against the deployed endpoint:
# curl https://subhrajit-mohanty-custom-embedding-server.hf.space/api/embeddings -d '{
#   "model": "nomic-embed-text",
#   "prompt": "The sky is blue because of Rayleigh scattering"
# }'