Dockerfile update, to be tested

argos-translate
Grégory Lebreton 8 months ago
parent 1ab2b95d2a
commit 931fc417ff

@@ -1,8 +1,8 @@
FROM python:3.11-slim-bookworm
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
COPY . .
WORKDIR /app
COPY app/. .
RUN pip3 install -r requirements.txt
#CMD ["gunicorn", "-w", "4", "wsgi:app", "--bind", "0.0.0.0:8000"]
CMD [ "python3", "-m" , "flask", "run", "--host=0.0.0.0"]

@@ -84,14 +84,15 @@ docker compose logs -f
:white_check_mark: Flask app frontend
:white_check_mark: authentification Keycloak -> https://git.legaragenumerique.fr/GARAGENUM/flask-keycloak
:white_check_mark: wsgi.py for prod + DNS
- [ ] add Nginx config (ai.domaine.tld + image.domaine.tld)
:white_check_mark: Translation via [Argos-Translate](https://github.com/argosopentech/argos-translate) :gb: -> :fr: and vice versa
- [ ] shoot OIDC for kc-gatekeeper
- [ ] add Nginx config (ai.domaine.tld + image.domaine.tld)
- [ ] stop generating button?
- [ ] home button
- [ ] home button?
- [ ] keep context (sqlite / json / session?)
:white_check_mark: Translation via [Argos-Translate](https://github.com/argosopentech/argos-translate) :gb: -> :fr: and vice versa (see the sketch after this list)
- [ ] restart the container on timeout
- [ ] use GPU
- [ ] train with a big GPU
- [ ] use GPU (CUDA docker image)
- [ ] train with a big GPU
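The en/fr translation item can be exercised on its own. Below is a minimal sketch using the upstream argostranslate library API (model download happens once, then translation runs locally); it is not necessarily the exact wiring used by the Flask UI.

```python
# Minimal en -> fr sketch with argostranslate (upstream library API).
import argostranslate.package
import argostranslate.translate

from_code, to_code = "en", "fr"

# Download and install the en->fr model once (cached on disk afterwards)
argostranslate.package.update_package_index()
available = argostranslate.package.get_available_packages()
pkg = next(p for p in available if p.from_code == from_code and p.to_code == to_code)
argostranslate.package.install_from_path(pkg.download())

print(argostranslate.translate.translate("Hello world", from_code, to_code))
```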
### bugs :ghost:

@@ -1,19 +1,4 @@
#########################LOCALAI#########################
#########################FLASK-UI#########################
# local-ai when flask is dockerized
LOCALAI_HOST=local-ai
MODELS_PATH=/models
DEBUG=true
REBUILD=true
#THREADS=4
DEFAULT_MODEL=gpt-3.5-turbo
#PRELOAD_MODELS=[{"url":"github:go-skynet/model-gallery/gpt4all-j.yaml","name":"gpt-3.5-turbo"},{"url":"github:go-skynet/model-gallery/stablediffusion.yaml","name":"stablediffusion"}]
#DEFAULT_MODEL=wizard-lm
#PRELOAD_MODELS=[{"url":"github:go-skynet/model-gallery/openllama_7b.yaml","name":"open_llama"}]
#GALLERIES=[{"name":"model-gallery","url":"github:go-skynet/model-gallery/index.yaml"}]
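As a rough illustration only (not necessarily how the repo reads its config), the Flask UI could pick up `LOCALAI_HOST` and `DEFAULT_MODEL` like this, assuming LocalAI listens on port 8080 as in the test script further down:

```python
# Hypothetical sketch: reading the .env values in the Flask UI.
# LOCALAI_HOST / DEFAULT_MODEL come from the environment; port 8080 is assumed.
import os
import openai  # pre-1.0 openai client, matching the test script below

openai.api_base = f"http://{os.getenv('LOCALAI_HOST', 'localhost')}:8080/v1"
openai.api_key = "not-needed"  # LocalAI does not require a real key by default
DEFAULT_MODEL = os.getenv("DEFAULT_MODEL", "gpt-3.5-turbo")
```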

@@ -23,7 +23,9 @@ services:
flask-ui:
build:
context: .
image: local/flask-ui:argos
image: flask-ui:2.0
build:
context: .
container_name: flask-ui
restart: always
ports:

@@ -1,9 +0,0 @@
import openai
openai.api_base = "http://localhost:8080/v1"  # point the pre-1.0 openai client at LocalAI
openai.api_key = "not-needed"  # the client insists on a key being set; LocalAI does not check it by default
# create a chat completion
chat_completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
# print the completion
print(chat_completion.choices[0].message.content)
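For comparison, the same request with the openai >= 1.0 client (which replaced the module-level `api_base` / `ChatCompletion` API used above) would look roughly like this; the `base_url` and `api_key` values are assumptions matching the script being removed:

```python
# Equivalent LocalAI request with the openai>=1.0 client.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8080/v1", api_key="not-needed")
resp = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello world"}],
)
print(resp.choices[0].message.content)
```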