Dan Flower committed on
Commit
623af80
·
1 Parent(s): 991d397

model download issues

Browse files
Files changed (2) hide show
  1. app.py +3 -3
  2. model/download_model.py +1 -1
app.py CHANGED
@@ -4,7 +4,7 @@ import sys
4
  import streamlit as st
5
 
6
  # Environment setup
7
- os.environ["MODEL_PATH"] = "/data/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
8
  os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
9
  os.environ["XDG_CONFIG_HOME"] = "/tmp/.streamlit"
10
  os.environ["BROWSER_GATHER_USAGE_STATS"] = "false"
@@ -13,11 +13,11 @@ os.environ["HF_HUB_CACHE"] = "/tmp/hf_cache"
13
  # Create required directories
14
  os.makedirs("/tmp/.streamlit", exist_ok=True)
15
  os.makedirs("/tmp/hf_cache", exist_ok=True)
16
- os.makedirs("/data/models", exist_ok=True)
17
 
18
  # Runtime model download if needed
19
 
20
- MODEL_PATH = "/data/models/TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf"
21
  if not os.path.exists(MODEL_PATH):
22
  st.warning("Model not found. Downloading...")
23
  try:
 
4
  import streamlit as st
5
 
6
  # Environment setup
7
+ os.environ["MODEL_PATH"] = "/tmp/models/tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf"
8
  os.environ["STREAMLIT_HOME"] = "/tmp/.streamlit"
9
  os.environ["XDG_CONFIG_HOME"] = "/tmp/.streamlit"
10
  os.environ["BROWSER_GATHER_USAGE_STATS"] = "false"
 
13
  # Create required directories
14
  os.makedirs("/tmp/.streamlit", exist_ok=True)
15
  os.makedirs("/tmp/hf_cache", exist_ok=True)
16
+ os.makedirs("/tmp/models", exist_ok=True)
17
 
18
  # Runtime model download if needed
19
 
20
+ MODEL_PATH = "/tmp/models/TinyLlama-1.1B-Chat-v1.0.Q4_K_M.gguf"
21
  if not os.path.exists(MODEL_PATH):
22
  st.warning("Model not found. Downloading...")
23
  try:
model/download_model.py CHANGED
@@ -13,7 +13,7 @@ model_path = hf_hub_download(
13
  repo_id="TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF",
14
  filename="tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",
15
  repo_type="model",
16
- local_dir="/data/models",
17
  token=token,
18
  )
19
 
 
13
  repo_id="TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF",
14
  filename="tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",
15
  repo_type="model",
16
+ local_dir="/tmp/models",
17
  token=token,
18
  )
19