Commit
fix docker build
vvincent1234 committed Jan 24, 2025
1 parent 748d253 commit b3f6324
Showing 7 changed files with 15 additions and 17 deletions.
2 changes: 2 additions & 0 deletions .dockerignore
@@ -0,0 +1,2 @@
data
tmp
2 changes: 1 addition & 1 deletion Dockerfile
@@ -55,7 +55,7 @@ WORKDIR /app

# Copy requirements and install Python dependencies
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple

# Install Playwright and browsers with system dependencies
ENV PLAYWRIGHT_BROWSERS_PATH=/ms-playwright
4 changes: 0 additions & 4 deletions docker-compose.yml
@@ -28,13 +28,9 @@ services:
- RESOLUTION_WIDTH=${RESOLUTION_WIDTH:-1920}
- RESOLUTION_HEIGHT=${RESOLUTION_HEIGHT:-1080}
- VNC_PASSWORD=${VNC_PASSWORD:-vncpassword}
- PERSISTENT_BROWSER_PORT=9222
- PERSISTENT_BROWSER_HOST=localhost
- CHROME_DEBUGGING_PORT=9222
- CHROME_DEBUGGING_HOST=localhost
volumes:
- ./data:/app/data
- ./data/chrome_data:/app/data/chrome_data
- /tmp/.X11-unix:/tmp/.X11-unix
restart: unless-stopped
shm_size: '2gb'
2 changes: 1 addition & 1 deletion src/utils/utils.py
@@ -88,7 +88,7 @@ def get_llm_model(provider: str, **kwargs):
return ChatOllama(
model=kwargs.get("model_name", "qwen2.5:7b"),
temperature=kwargs.get("temperature", 0.0),
num_ctx=128000,
num_ctx=kwargs.get("num_ctx", 32000),
base_url=kwargs.get("base_url", "http://localhost:11434"),
)
elif provider == "azure_openai":
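The utils.py change above stops hardcoding a 128000-token context window and instead reads num_ctx from kwargs with a 32000 default. A minimal usage sketch, assuming the repository's src.utils import path; the override value shown is illustrative and not part of this commit:

    from src.utils import utils

    # num_ctx is now a regular kwarg with a 32000-token default; callers that
    # need the old, larger window can pass it explicitly.
    llm = utils.get_llm_model(
        provider="ollama",
        model_name="qwen2.5:7b",
        temperature=0.0,
        base_url="http://localhost:11434",  # default shown in the hunk above
        num_ctx=128000,                     # illustrative override of the new default
    )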
2 changes: 1 addition & 1 deletion supervisord.conf
@@ -59,7 +59,7 @@ depends_on=x11vnc

[program:persistent_browser]
command=bash -c 'mkdir -p /app/data/chrome_data && sleep 8 && google-chrome --user-data-dir=/app/data/chrome_data --window-position=0,0 --window-size=%(ENV_RESOLUTION_WIDTH)s,%(ENV_RESOLUTION_HEIGHT)s --start-maximized --no-sandbox --disable-dev-shm-usage --disable-gpu --disable-software-rasterizer --disable-setuid-sandbox --no-first-run --no-default-browser-check --no-experiments --ignore-certificate-errors --remote-debugging-port=9222 --remote-debugging-address=0.0.0.0 "data:text/html,<html><body style=\"background: \#f0f0f0; margin: 0; display: flex; justify-content: center; align-items: center; height: 100vh; font-family: Arial;\"><h1>Browser Ready for AI Interaction</h1></body></html>"'
autorestart=%(ENV_CHROME_PERSISTENT_SESSION)s
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
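The supervisord program above launches Chrome with --remote-debugging-port=9222 bound to 0.0.0.0, which is what downstream tooling attaches to. A minimal sketch of connecting to that persistent browser over CDP with Playwright, assuming port 9222 is reachable from wherever the script runs; the endpoint URL and function name are illustrative, not part of this commit:

    import asyncio
    from playwright.async_api import async_playwright

    async def attach_to_persistent_browser():
        async with async_playwright() as p:
            # Attach to the Chrome instance started by supervisord rather than
            # launching a new browser; adjust the host if 9222 is mapped elsewhere.
            browser = await p.chromium.connect_over_cdp("http://localhost:9222")
            context = browser.contexts[0] if browser.contexts else await browser.new_context()
            page = context.pages[0] if context.pages else await context.new_page()
            print(await page.title())

    asyncio.run(attach_to_persistent_browser())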
16 changes: 8 additions & 8 deletions tests/test_browser_use.py
@@ -247,18 +247,18 @@ async def test_browser_use_custom_v2():
# api_key=os.getenv("GOOGLE_API_KEY", "")
# )

llm = utils.get_llm_model(
provider="deepseek",
model_name="deepseek-chat",
temperature=0.8
)

# llm = utils.get_llm_model(
# provider="ollama", model_name="qwen2.5:7b", temperature=0.8
# provider="deepseek",
# model_name="deepseek-chat",
# temperature=0.8
# )

llm = utils.get_llm_model(
provider="ollama", model_name="qwen2.5:7b", temperature=0.5
)

controller = CustomController()
use_own_browser = True
use_own_browser = False
disable_security = True
use_vision = False # Set to False when using DeepSeek
tool_call_in_content = True # Set to True when using Ollama
4 changes: 2 additions & 2 deletions tests/test_llm_api.py
@@ -127,5 +127,5 @@ def test_ollama_model():
# test_openai_model()
# test_gemini_model()
# test_azure_openai_model()
test_deepseek_model()
# test_ollama_model()
# test_deepseek_model()
test_ollama_model()
