Spaces:
Sleeping
Sleeping
Upload 3 files
Browse files- Dockerfile +5 -16
- README.md +3 -17
- app.py +29 -0
Dockerfile
CHANGED
@@ -1,21 +1,10 @@
|
|
1 |
-
FROM python:3.
|
2 |
|
3 |
WORKDIR /app
|
4 |
|
5 |
-
|
6 |
-
|
7 |
-
curl \
|
8 |
-
software-properties-common \
|
9 |
-
git \
|
10 |
-
&& rm -rf /var/lib/apt/lists/*
|
11 |
|
12 |
-
COPY
|
13 |
-
COPY src/ ./src/
|
14 |
|
15 |
-
|
16 |
-
|
17 |
-
EXPOSE 8501
|
18 |
-
|
19 |
-
HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
|
20 |
-
|
21 |
-
ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
|
|
|
1 |
+
FROM python:3.10-slim
|
2 |
|
3 |
WORKDIR /app
|
4 |
|
5 |
+
COPY requirements.txt .
|
6 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
|
|
|
|
|
|
|
|
7 |
|
8 |
+
COPY . .
|
|
|
9 |
|
10 |
+
CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.enableCORS=false"]
|
|
|
|
|
|
|
|
|
|
|
|
README.md
CHANGED
@@ -1,19 +1,5 @@
|
|
1 |
-
|
2 |
-
title: MaaGpt
|
3 |
-
emoji: π
|
4 |
-
colorFrom: red
|
5 |
-
colorTo: red
|
6 |
-
sdk: docker
|
7 |
-
app_port: 8501
|
8 |
-
tags:
|
9 |
-
- streamlit
|
10 |
-
pinned: false
|
11 |
-
short_description: Recipes with a Mother's Touch
|
12 |
-
---
|
13 |
|
14 |
-
|
15 |
|
16 |
-
|
17 |
-
|
18 |
-
If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
|
19 |
-
forums](https://discuss.streamlit.io).
|
|
|
1 |
+
# MaaGPT π΅π²
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
|
3 |
+
MaaGPT is a Streamlit app powered by Phi-2 LLM. It acts like a sweet Indian mother who gives advice about food with love and sass.
|
4 |
|
5 |
+
π§‘ Try asking for pizza or Maggi and see how Maa reacts.
|
|
|
|
|
|
app.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
|
3 |
+
|
4 |
+
# Model loading is wrapped in @st.cache_resource so the heavyweight
# weights/tokenizer are created once and reused across Streamlit reruns.
@st.cache_resource
def load_model():
    """Load microsoft/phi-2 and return a ready-to-use text-generation pipeline.

    Returns:
        A transformers ``pipeline("text-generation", ...)`` bound to the
        phi-2 model and its tokenizer.
    """
    repo = "microsoft/phi-2"
    # NOTE(review): loaded with defaults (full precision, CPU unless CUDA is
    # auto-detected) — confirm the Space has enough RAM for phi-2 this way.
    tok = AutoTokenizer.from_pretrained(repo)
    lm = AutoModelForCausalLM.from_pretrained(repo)
    return pipeline("text-generation", model=lm, tokenizer=tok)


# Module-level generator handle used by the UI below; the model is loaded
# (or fetched from the cache) when the script first runs.
generator = load_model()
|
12 |
+
|
13 |
+
def maa_prompt(user_input):
    """Embed the child's request in the 'caring Indian mother' persona prompt.

    The prompt ends with the literal cue ``Mom:`` so the model continues in
    the mother's voice; the caller splits the generation on that cue to
    extract only the reply.
    """
    persona = f"""
You are a caring Indian mother. If your child asks for junk food (like chips, pizza, maggi, etc.), respond with love and a little scolding.

Child: {user_input}
Mom:"""
    return persona
|
19 |
+
|
20 |
+
# --- Streamlit UI ---------------------------------------------------------
# (page_icon/title strings reproduced exactly as committed; they appear to be
# mis-encoded emoji — worth re-saving the file as UTF-8 upstream.)
st.set_page_config(page_title="MaaGPT", page_icon="π΅")
st.title("π΅ MaaGPT: Your Indian Mom AI")

user_input = st.text_input("What do you feel like eating, beta? π")

if user_input:
    # Sample a completion for the persona prompt, then keep only the text
    # after the final "Mom:" cue (the model's reply in the mother's voice).
    generation = generator(maa_prompt(user_input), max_new_tokens=100, do_sample=True)
    full_text = generation[0]["generated_text"]
    reply = full_text.split("Mom:")[-1].strip()
    st.markdown(f"### π΅ Maa says:\n\n{reply}")
|