Wait for API to be available before starting UI

Mayank Chhabra 2023-08-15 23:42:08 +07:00
parent 55a5fbf3b0
commit b0b059a05a
4 changed files with 9 additions and 2 deletions

@@ -14,3 +14,4 @@ services:
 - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
 - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
 - 'DEFAULT_MODEL=/models/llama-2-13b-chat.bin'
+- 'WAIT_HOSTS=llama-gpt-api:8000'
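The WAIT_HOSTS variable added in each of the three compose files (13B, 70B and 7B model variants) is read by the docker-compose-wait helper that the Dockerfile change further down bakes into the UI image: the helper blocks until every listed host:port accepts a TCP connection, so the UI container no longer races ahead of the API. A minimal sketch of the resulting environment block, assuming a service named llama-gpt-ui and standard two-space YAML indentation (neither detail is taken from the actual compose files):

services:
  llama-gpt-ui:
    environment:
      - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
      - 'DEFAULT_MODEL=/models/llama-2-13b-chat.bin'
      # Consumed by the /wait binary inside the UI image (see the Dockerfile
      # hunk below): wait until this host:port accepts TCP connections.
      - 'WAIT_HOSTS=llama-gpt-api:8000'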

@@ -14,3 +14,4 @@ services:
 - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
 - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
 - 'DEFAULT_MODEL=/models/llama-2-70b-chat.bin'
+- 'WAIT_HOSTS=llama-gpt-api:8000'

@@ -20,3 +20,4 @@ services:
 - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
 - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
 - 'DEFAULT_MODEL=/models/llama-2-7b-chat.bin'
+- 'WAIT_HOSTS=llama-gpt-api:8000'

@@ -22,8 +22,12 @@ COPY --from=build /app/package*.json ./
 COPY --from=build /app/next.config.js ./next.config.js
 COPY --from=build /app/next-i18next.config.js ./next-i18next.config.js
+## Add the wait script to the image
+COPY --from=ghcr.io/ufoscout/docker-compose-wait:latest /wait /wait
 # Expose the port the app will run on
 EXPOSE 3000
-# Start the application
-CMD ["npm", "start"]
+# Start the application after the API is ready
+CMD /wait && npm start
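
The Dockerfile change copies the statically linked /wait binary out of the ghcr.io/ufoscout/docker-compose-wait image and switches CMD to shell form, so the container runs /wait first: it polls the endpoints listed in WAIT_HOSTS and exits successfully only once they accept TCP connections, after which npm start launches the UI. If the wait times out, /wait exits non-zero and the container stops instead of serving a UI that cannot reach the API. The helper is configured purely through environment variables, so the compose files could also tune the wait behaviour; a hedged sketch follows (WAIT_TIMEOUT and WAIT_SLEEP_INTERVAL are the option names as recalled from the docker-compose-wait README, and the values are illustrative only, not part of this commit):

    environment:
      - 'WAIT_HOSTS=llama-gpt-api:8000'
      # Optional docker-compose-wait settings (names assumed from its README;
      # verify against the pinned image). Values below are illustrative only.
      - 'WAIT_TIMEOUT=300'         # wait up to 300s for the API before giving up
      - 'WAIT_SLEEP_INTERVAL=5'    # seconds between connection attempts

Using the shell-form CMD means PID 1 inside the UI container is the shell that chains the two commands rather than the Node process; that is the trade-off accepted in exchange for the simple /wait && npm start ordering.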