Spaces:
Sleeping
Sleeping
Upload 4 files
Browse files- Dockerfile +16 -0
- main.py +37 -0
- main.yml +27 -0
- requirements.txt +12 -0
Dockerfile
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
# you will also find guides on how best to write your Dockerfile

FROM python:3.9

WORKDIR /code

# Copy only the requirements first so the dependency layer is cached
# and not rebuilt on every source change.
COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Bake the quantized Phi-3-mini GGUF weights into the image; main.py loads
# this file by its bare name, relative to WORKDIR (/code).
RUN wget https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf/resolve/main/Phi-3-mini-4k-instruct-q4.gguf

# Copy the rest of the source tree after the heavy layers above.
COPY . .

# Port 7860 is the Hugging Face Spaces convention for Docker Spaces.
CMD ["gunicorn", "-b", "0.0.0.0:7860", "main:app"]
main.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from flask import Flask, request, jsonify
from langchain_community.llms import LlamaCpp
import os

app = Flask(__name__)

# CPU-only inference: no layers are offloaded to a GPU.
n_gpu_layers = 0
n_batch = 1024

# GGUF file downloaded at image build time (see Dockerfile); loaded from CWD.
MODEL_PATH = "Phi-3-mini-4k-instruct-q4.gguf"

llm = LlamaCpp(
    model_path=MODEL_PATH,
    temperature=0.1,
    n_gpu_layers=n_gpu_layers,
    n_batch=n_batch,
    verbose=True,
    n_ctx=4096,  # Phi-3-mini "4k" context window
)

# Sanity check at startup that the model file is present and non-empty.
file_size = os.stat(MODEL_PATH)
print("model size ====> :", file_size.st_size, "bytes")


@app.route('/', methods=['POST'])
def get_skills():
    """Extract the skills mentioned in a CV.

    Expects a JSON body of the form {"cv_body": "<cv text>"} and returns
    {"skills": "<model output>"}; responds 400 when the body is missing.
    """
    # get_json(silent=True) returns None instead of raising on a non-JSON
    # POST, so malformed requests get a clean 400 rather than a 500.
    payload = request.get_json(silent=True) or {}
    cv_body = payload.get('cv_body')
    if not cv_body:
        return jsonify({'error': "missing 'cv_body' in JSON request body"}), 400

    # BUG FIX: the instruction must be part of the <|user|> turn. The original
    # placed it after <|assistant|>, where the model treats it as text it had
    # already generated instead of as the request to answer.
    output = llm(
        "<|user|>\nCan you list the skills mentioned in the CV?\n"
        f"{cv_body}<|end|>\n<|assistant|>",
        max_tokens=256,   # generate up to 256 tokens
        stop=["<|end|>"],
        echo=False,       # BUG FIX: echo=True returned the whole CV prompt
                          # inside the 'skills' field of the response
    )

    return jsonify({'skills': output})


if __name__ == '__main__':
    app.run()
|
main.yml
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
name: Python application

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2

    - name: Set up Python 3.x
      uses: actions/setup-python@v2
      with:
        python-version: '3.x'

    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install -r requirements.txt

    # BUG FIX: the repository has no app.py — the entry point is main.py —
    # and starting the Flask dev server would block the CI job until its
    # timeout anyway. Compile-check the module instead so CI still validates
    # the code without launching a server.
    - name: Check the app
      run: python -m py_compile main.py
requirements.txt
ADDED
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
flask
|
2 |
+
matplotlib
|
3 |
+
numpy
|
4 |
+
gensim
|
5 |
+
scikit-learn
|
6 |
+
llama-cpp-python
|
7 |
+
huggingface-hub
|
8 |
+
langchain
|
9 |
+
langchain-experimental
|
10 |
+
scipy==1.10.1
|
11 |
+
gunicorn
|
12 |
+
langchain-community
|