Wait for API to be available before starting UI
parent 55a5fbf3b0
commit b0b059a05a
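This change adds the docker-compose-wait helper to the UI image and points it at the API container, so the UI only launches once llama-gpt-api is accepting connections on port 8000. The same WAIT_HOSTS entry is added to each of the three compose configurations (13B, 70B and 7B models) in the hunks below, and the Dockerfile is updated to run the wait script before npm start.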
@@ -14,3 +14,4 @@ services:
       - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
       - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
       - 'DEFAULT_MODEL=/models/llama-2-13b-chat.bin'
+      - 'WAIT_HOSTS=llama-gpt-api:8000'

@@ -14,3 +14,4 @@ services:
       - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
       - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
       - 'DEFAULT_MODEL=/models/llama-2-70b-chat.bin'
+      - 'WAIT_HOSTS=llama-gpt-api:8000'

@@ -20,3 +20,4 @@ services:
       - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
       - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
       - 'DEFAULT_MODEL=/models/llama-2-7b-chat.bin'
+      - 'WAIT_HOSTS=llama-gpt-api:8000'

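For context, a minimal sketch of what a UI service's environment block looks like with the new variable in place. The service name llama-gpt-ui and the surrounding indentation are illustrative, not copied from this diff; only the environment entries themselves appear in the hunks above.

    llama-gpt-ui:
      environment:
        # Existing settings shown in the hunks above
        - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
        - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
        - 'DEFAULT_MODEL=/models/llama-2-7b-chat.bin'
        # New: host:port pairs the /wait script polls before the UI starts
        - 'WAIT_HOSTS=llama-gpt-api:8000'
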
@@ -22,8 +22,12 @@ COPY --from=build /app/package*.json ./
 COPY --from=build /app/next.config.js ./next.config.js
 COPY --from=build /app/next-i18next.config.js ./next-i18next.config.js

+# Add the wait script to the image
+COPY --from=ghcr.io/ufoscout/docker-compose-wait:latest /wait /wait
+
 # Expose the port the app will run on
 EXPOSE 3000

-# Start the application
-CMD ["npm", "start"]
+# Start the application after the API is ready
+CMD /wait && npm start

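The mechanics, for the record: the COPY --from line pulls the small /wait binary out of the ghcr.io/ufoscout/docker-compose-wait image, and the CMD switches from exec form to shell form (equivalent to running /bin/sh -c "/wait && npm start" in the container) so the two commands can be chained. The wait script reads the comma-separated host:port pairs in WAIT_HOSTS, blocks until each one accepts a TCP connection (or gives up and exits non-zero after its timeout), and only then does npm start run.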