log request also

Muzhen Gaming
2025-10-16 10:06:47 +08:00
parent 1f9f921829
commit 5901254405
3 changed files with 204 additions and 30 deletions


@@ -30,6 +30,7 @@
- PowerShell: ``$env:BG_AGENT_DEBUG='1'`` (or ``$env:DEBUG='1'``)
- Cmd: ``set BG_AGENT_DEBUG=1``
- When enabled, logs are written to `%LOCALAPPDATA%\BgVisionAgent\agent.log` at DEBUG level.
+- Additionally, the agent saves full OpenAI HTTP request/response JSON files (URL, request headers, payload, response status, response headers, body) in `%LOCALAPPDATA%\BgVisionAgent\http`. Filenames include timestamps and attempt numbers. Secrets are redacted from headers.
- When not enabled, only warnings/errors go to stderr; no log file is written.
**Notes**


@@ -17,6 +17,11 @@ from PIL import ImageGrab
import keyboard
from .config import Settings, ensure_dirs, data_paths
+from .debug_http import chat_completion_with_logging, log_attempt_error
+
+
+def _now_stamp() -> str:
+    return dt.datetime.now().strftime("%Y%m%d-%H%M%S")
class State:
@@ -99,10 +104,6 @@ def _setup_logging(log_path: str):
# No file logging when debug is off
-def _now_stamp() -> str:
-    return dt.datetime.now().strftime("%Y%m%d-%H%M%S")
def capture_active_window(state: State):
    """Capture the current active window (Windows). Fallback to full screen if needed."""
    logging.debug("capture_active_window: start; captures_dir=%s", state.captures_dir)
@@ -187,34 +188,49 @@ def send_to_openai(state: State):
    attempts = max(1, state.cfg.retries)
    last_err = None
-    for i in range(attempts):
-        try:
-            logging.debug("send_to_openai: attempt %d/%d", i + 1, attempts)
-            resp = client.chat.completions.create(
-                model=state.cfg.model,
-                messages=[{"role": "user", "content": content_items}],
-            )
-            text = resp.choices[0].message.content or ""
-            state.response_text = text
-            try:
-                with open(state.response_path, "w", encoding="utf-8") as f:
-                    f.write(text)
-            except Exception:
-                pass
-            logging.info("OpenAI response received and stored.")
-            logging.debug(
-                "send_to_openai: response_len=%d written_to=%s",
-                len(text),
-                state.response_path,
-            )
-            return
-        except Exception as e:
-            last_err = e
-            backoff = min(8, 2 ** i)
-            logging.warning(f"OpenAI send failed (attempt {i+1}/{attempts}): {e}; retrying in {backoff}s")
-            time.sleep(backoff)
-    logging.exception(f"All attempts to send to OpenAI failed: {last_err}")
+    try:
+        for i in range(attempts):
+            try:
+                logging.debug("send_to_openai: attempt %d/%d", i + 1, attempts)
+                messages = [{"role": "user", "content": content_items}]
+                text = chat_completion_with_logging(
+                    client,
+                    base,
+                    api_key,
+                    model=state.cfg.model,
+                    messages=messages,
+                    app_dir=state.cfg.app_dir,
+                    attempt=i + 1,
+                )
+                state.response_text = text
+                try:
+                    with open(state.response_path, "w", encoding="utf-8") as f:
+                        f.write(text)
+                except Exception:
+                    pass
+                logging.info("OpenAI response received and stored.")
+                logging.debug(
+                    "send_to_openai: response_len=%d written_to=%s",
+                    len(text),
+                    state.response_path,
+                )
+                return
+            except Exception as e:
+                last_err = e
+                backoff = min(8, 2 ** i)
+                logging.warning(
+                    f"OpenAI send failed (attempt {i+1}/{attempts}): {e}; retrying in {backoff}s"
+                )
+                log_attempt_error(state.cfg.app_dir, i + 1, e)
+                time.sleep(backoff)
+        logging.exception(f"All attempts to send to OpenAI failed: {last_err}")
+    finally:
+        try:
+            client.close()
+        except Exception:
+            pass

def type_response(state: State):

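For reference, and not part of this commit's diff, a minimal sketch of how the dumped files could be inspected once debugging is enabled; it assumes the `%LOCALAPPDATA%\BgVisionAgent\http` location and the `<timestamp>-chat_completions-attempt<N>-*.json` naming used by the `debug_http` module below:

import json
import os
from pathlib import Path

# Directory the agent writes to when BG_AGENT_DEBUG/DEBUG is on (Windows only).
http_dir = Path(os.environ["LOCALAPPDATA"]) / "BgVisionAgent" / "http"

if http_dir.is_dir():
    # Names start with a YYYYMMDD-HHMMSS stamp, so lexical order is roughly chronological;
    # the last few entries are the most recent request/response/error dumps.
    for path in sorted(http_dir.glob("*.json"))[-3:]:
        data = json.loads(path.read_text(encoding="utf-8"))
        print(path.name, data.get("url") or data.get("status_code") or data.get("error"))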
bg_agent/debug_http.py (new file)

@@ -0,0 +1,157 @@
import os
import json
import datetime as dt
import logging
from pathlib import Path


def _debug_enabled() -> bool:
    for name in ("BG_AGENT_DEBUG", "DEBUG"):
        v = os.environ.get(name)
        if v and str(v).strip().lower() in {"1", "true", "yes", "on"}:
            return True
    return False


def _now_stamp() -> str:
    return dt.datetime.now().strftime("%Y%m%d-%H%M%S")


def _iso_now() -> str:
    return dt.datetime.now().isoformat(timespec="seconds")


def _join_url(base: str, endpoint: str) -> str:
    return f"{str(base).rstrip('/')}/{str(endpoint).lstrip('/')}"


def _redact(value: str) -> str:
    if not value:
        return value
    return "***REDACTED***"


def _write_json(path: str, data: dict) -> None:
    try:
        Path(os.path.dirname(path)).mkdir(parents=True, exist_ok=True)
        with open(path, "w", encoding="utf-8") as f:
            json.dump(data, f, ensure_ascii=False, indent=None, separators=(",", ":"))
    except Exception as e:
        logging.debug("debug_http: failed to write %s: %s", path, e)


def chat_completion_with_logging(
    client, base_url: str, api_key: str, *, model: str, messages: list, app_dir: str, attempt: int
) -> str:
    """Perform a chat.completions.create call and, if debug is enabled, write
    request/response JSON files containing URL, headers (sanitized), payload, and body.

    Returns the assistant text content.
    """
    # Build request
    payload = {"model": model, "messages": messages}

    if _debug_enabled():
        tag = f"{_now_stamp()}-chat_completions-attempt{attempt}"
        http_dir = os.path.join(app_dir, "http")
        req_path = os.path.join(http_dir, f"{tag}-request.json")
        req_headers = {
            "Authorization": _redact(f"Bearer {api_key}"),
            "Content-Type": "application/json",
            "Accept": "application/json",
        }
        _write_json(req_path, {
            "timestamp": _iso_now(),
            "attempt": attempt,
            "method": "POST",
            "url": _join_url(base_url, "/chat/completions"),
            "headers": req_headers,
            "payload": payload,
        })

    # Prefer raw response for headers/status when available, but keep it optional
    text = ""
    used_raw = False
    with_raw = None
    try:
        with_raw = getattr(getattr(client.chat.completions, "with_raw_response"), "create")
        used_raw = True
    except Exception:
        used_raw = False

    if used_raw and with_raw:
        raw = with_raw(**payload)
        http_resp = getattr(raw, "http_response", raw)
        try:
            body = http_resp.json()
        except Exception:
            try:
                body = json.loads(getattr(http_resp, "text", "") or "{}")
            except Exception:
                body = {"_note": "non-JSON response"}
        try:
            text = (
                body.get("choices", [{}])[0]
                .get("message", {})
                .get("content", "")
            )
        except Exception:
            text = ""
        if _debug_enabled():
            tag = f"{_now_stamp()}-chat_completions-attempt{attempt}"
            http_dir = os.path.join(app_dir, "http")
            resp_path = os.path.join(http_dir, f"{tag}-response.json")
            headers_dict = {}
            try:
                headers_dict = dict(getattr(http_resp, "headers", {}) or {})
            except Exception:
                headers_dict = {}
            _write_json(resp_path, {
                "timestamp": _iso_now(),
                "attempt": attempt,
                "status_code": getattr(http_resp, "status_code", None),
                "reason": getattr(http_resp, "reason_phrase", None),
                "headers": headers_dict,
                "body": body,
            })
        return text

    # Fallback: normal SDK call
    resp = client.chat.completions.create(**payload)
    try:
        text = resp.choices[0].message.content or ""
    except Exception:
        text = ""
    if _debug_enabled():
        tag = f"{_now_stamp()}-chat_completions-attempt{attempt}"
        http_dir = os.path.join(app_dir, "http")
        resp_path = os.path.join(http_dir, f"{tag}-response.json")
        body = None
        try:
            if hasattr(resp, "model_dump_json"):
                body = json.loads(resp.model_dump_json())
            elif hasattr(resp, "model_dump"):
                body = resp.model_dump(mode="python")  # type: ignore[arg-type]
            elif hasattr(resp, "to_dict"):
                body = resp.to_dict()
        except Exception:
            body = {"_note": "unable to serialize response model"}
        _write_json(resp_path, {"timestamp": _iso_now(), "attempt": attempt, "body": body})
    return text


def log_attempt_error(app_dir: str, attempt: int, error: Exception) -> None:
    if not _debug_enabled():
        return
    http_dir = os.path.join(app_dir, "http")
    tag = f"{_now_stamp()}-chat_completions-attempt{attempt}-error"
    err_path = os.path.join(http_dir, f"{tag}.json")
    _write_json(err_path, {
        "timestamp": _iso_now(),
        "attempt": attempt,
        "error": str(error),
        "type": getattr(type(error), "__name__", "Exception"),
    })
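
A rough standalone usage sketch (not part of the commit): it mirrors the call made from send_to_openai above, with the base URL, model name, and app directory as placeholder values, and assumes the official `openai` v1 Python SDK that the agent already uses for its client.

import os

from openai import OpenAI

from bg_agent.debug_http import chat_completion_with_logging, log_attempt_error

os.environ["BG_AGENT_DEBUG"] = "1"  # enable request/response dumps

api_key = os.environ["OPENAI_API_KEY"]
base_url = "https://api.openai.com/v1"  # placeholder; the agent takes this from its config
app_dir = os.path.join(os.environ.get("LOCALAPPDATA", "."), "BgVisionAgent")

client = OpenAI(api_key=api_key, base_url=base_url)
try:
    text = chat_completion_with_logging(
        client,
        base_url,
        api_key,
        model="gpt-4o-mini",  # placeholder model name
        messages=[{"role": "user", "content": "ping"}],
        app_dir=app_dir,
        attempt=1,
    )
    print(text)
except Exception as e:
    # Mirrors how send_to_openai records a failed attempt before retrying.
    log_attempt_error(app_dir, 1, e)
    raise
finally:
    client.close()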