diff --git a/.dockerignore b/.dockerignore index 76082208..60640f51 100644 --- a/.dockerignore +++ b/.dockerignore @@ -10,6 +10,7 @@ **/.venv **/.vscode *.env* +!/.env # Streamlit ignores **/secrets*.toml diff --git a/Dockerfile b/Dockerfile index dd0aecd7..9bec9ea4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,5 @@ -FROM python:3.11 AS requirements-stage +FROM python:3.11 AS requirements-stage +# Run `skyvern init llm` before building to generate the .env file WORKDIR /tmp RUN pip install poetry diff --git a/README.md b/README.md index a31a1cd8..2fe72b5a 100644 --- a/README.md +++ b/README.md @@ -193,6 +193,7 @@ skyvern status 1. Make sure you have [Docker Desktop](https://www.docker.com/products/docker-desktop/) installed and running on your machine 1. Make sure you don't have postgres running locally (Run `docker ps` to check) 1. Clone the repository and navigate to the root directory +1. Run `skyvern init llm` to generate a `.env` file. This will be copied into the Docker image. 1. Fill in the LLM provider key on the [docker-compose.yml](./docker-compose.yml). *If you want to run Skyvern on a remote server, make sure you set the correct server ip for the UI container in [docker-compose.yml](./docker-compose.yml).* 2. 
Run the following command via the commandline: ```bash diff --git a/docker-compose.yml b/docker-compose.yml index 1f0b82dd..9cd25dfc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,6 +21,8 @@ services: skyvern: image: public.ecr.aws/skyvern/skyvern:latest restart: on-failure + env_file: + - .env # comment out if you want to externally call skyvern API ports: - 8000:8000 @@ -45,7 +47,7 @@ services: # - BROWSER_REMOTE_DEBUGGING_URL=http://host.docker.internal:9222/ # ========================= - # LLM Settings + # LLM Settings - Recommended to use the skyvern CLI (`skyvern init llm`) to set up your LLM # ========================= # OpenAI Support: # If you want to use OpenAI as your LLM provider, uncomment the following lines and fill in your OpenAI API key. @@ -54,10 +56,10 @@ services: # - OPENAI_API_KEY= # Gemini Support: # Gemini is a new LLM provider that is currently in beta. You can use it by uncommenting the following lines and filling in your Gemini API key. - - LLM_KEY=GEMINI - - ENABLE_GEMINI=true - - GEMINI_API_KEY=YOUR_GEMINI_KEY - - LLM_KEY=GEMINI_2.5_PRO_PREVIEW_03_25 + # - LLM_KEY=GEMINI + # - ENABLE_GEMINI=true + # - GEMINI_API_KEY=YOUR_GEMINI_KEY + # - LLM_KEY=GEMINI_2.5_PRO_PREVIEW_03_25 # If you want to use other LLM provider, like azure and anthropic: # - ENABLE_ANTHROPIC=true # - LLM_KEY=ANTHROPIC_CLAUDE3.5_SONNET