diff --git a/examples/browser_scoped.py b/examples/browser_scoped.py new file mode 100644 index 00000000..e409517a --- /dev/null +++ b/examples/browser_scoped.py @@ -0,0 +1,19 @@ +"""Example: browser-scoped client for browser VM process exec and raw HTTP.""" + +from kernel import Kernel + +# After creating or loading a browser session (with base_url + cdp_ws_url from the API): +# browser = client.browsers.create(...) +# scoped = client.for_browser(browser) +# scoped.process.exec(command="uname", args=["-a"]) +# r = scoped.request("GET", "https://example.com") +# with scoped.stream("GET", "https://example.com") as resp: +# print(resp.read()) + + +def main() -> None: + _ = Kernel + + +if __name__ == "__main__": + main() diff --git a/scripts/generate_browser_scoped.py b/scripts/generate_browser_scoped.py new file mode 100644 index 00000000..2b8a03b2 --- /dev/null +++ b/scripts/generate_browser_scoped.py @@ -0,0 +1,652 @@ +#!/usr/bin/env python3 +"""Generate browser-scoped binding classes from AST of src/kernel/resources/browsers/**.""" +# pyright: reportUnknownParameterType=false, reportUnknownVariableType=false, reportUnknownMemberType=false, reportUnknownArgumentType=false, reportUndefinedVariable=false, reportUnusedVariable=false + +from __future__ import annotations + +import ast +from typing import Iterable +from pathlib import Path +from dataclasses import dataclass + + +@dataclass(frozen=True) +class IdBinding: + kind: str # "positional" | "kwonly" + + +def _repo_root() -> Path: + return Path(__file__).resolve().parent.parent + + +def _browsers_root() -> Path: + return _repo_root() / "src/kernel/resources/browsers" + + +def _iter_browser_py_files() -> list[Path]: + root = _browsers_root() + out: list[Path] = [] + for p in sorted(root.rglob("*.py")): + if p.name in ("__init__.py", "browsers.py"): + continue + out.append(p) + return out + + +def _is_resource_class(node: ast.ClassDef) -> bool: + if not node.name.endswith("Resource"): + return False + if 
node.name.startswith("Async"): + return False + if "With" in node.name: + return False + for b in node.bases: + if isinstance(b, ast.Name) and b.id == "SyncAPIResource": + return True + if isinstance(b, ast.Attribute) and b.attr == "SyncAPIResource": + return True + return False + + +def _is_async_resource_class(node: ast.ClassDef) -> bool: + if not node.name.startswith("Async") or not node.name.endswith("Resource"): + return False + if "With" in node.name: + return False + for b in node.bases: + if isinstance(b, ast.Name) and b.id == "AsyncAPIResource": + return True + if isinstance(b, ast.Attribute) and b.attr == "AsyncAPIResource": + return True + return False + + +def _async_resource_name(sync_name: str) -> str: + if sync_name.startswith("Async"): + return sync_name + return f"Async{sync_name}" + + +def _has_cached_property_decorator(node: ast.FunctionDef | ast.AsyncFunctionDef) -> bool: + for d in node.decorator_list: + if isinstance(d, ast.Name) and d.id == "cached_property": + return True + if isinstance(d, ast.Attribute) and d.attr == "cached_property": + return True + return False + + +def _annotation_root_name(node: ast.AST | None) -> str | None: + if node is None: + return None + if isinstance(node, ast.Name): + return node.id + if isinstance(node, ast.Subscript): + return _annotation_root_name(node.value) + if isinstance(node, ast.Attribute): + return node.attr + if isinstance(node, ast.BinOp) and isinstance(node.op, ast.BitOr): + return _annotation_root_name(node.left) or _annotation_root_name(node.right) + return None + + +def _find_id_binding(arguments: ast.arguments) -> IdBinding | None: + pos = list(arguments.posonlyargs) + list(arguments.args) + if len(pos) > 0 and pos[0].arg == "self": + rest = pos[1:] + else: + rest = pos + for a in rest: + if a.arg == "id": + return IdBinding("positional") + for a in arguments.kwonlyargs: + if a.arg == "id": + return IdBinding("kwonly") + return None + + +def _strip_id_from_arguments(arguments: ast.arguments) 
-> ast.arguments: + """Remove `id` from positional and keyword-only parameters; fix defaults tail.""" + + posonly = list(arguments.posonlyargs) + pos = list(arguments.args) + combined = posonly + pos + defaults = list(arguments.defaults or []) + nd = len(defaults) + + kept: list[tuple[ast.arg, ast.expr | None, str]] = [] + for i, a in enumerate(combined): + if a.arg == "id": + continue + d: ast.expr | None = None + if nd and i >= len(combined) - nd: + d = defaults[i - (len(combined) - nd)] + kind = "posonly" if i < len(posonly) else "pos" + kept.append((a, d, kind)) + + new_posonly = [a for a, _d, k in kept if k == "posonly"] + new_pos = [a for a, _d, k in kept if k == "pos"] + new_combined = new_posonly + new_pos + new_defaults_list = [d for a, d, k in kept if d is not None] + new_nd = len(new_defaults_list) + if new_nd > len(new_combined): + raise RuntimeError("invalid defaults after strip") + new_defaults = new_defaults_list[-new_nd:] if new_nd else [] + + kwonly = [a for a in arguments.kwonlyargs if a.arg != "id"] + kw_defaults_old = list(arguments.kw_defaults or []) + new_kw_defaults: list[ast.expr | None] = [] + for i, a in enumerate(arguments.kwonlyargs): + d = kw_defaults_old[i] if i < len(kw_defaults_old) else None + if a.arg != "id": + new_kw_defaults.append(d) + + return ast.arguments( + posonlyargs=new_posonly, + args=new_pos, + kwonlyargs=kwonly, + kw_defaults=new_kw_defaults, + defaults=new_defaults, + vararg=arguments.vararg, + kwarg=arguments.kwarg, + ) + + +def _public_signature(inner: ast.FunctionDef | ast.AsyncFunctionDef) -> ast.arguments: + if _find_id_binding(inner.args) is None: + return inner.args + return _strip_id_from_arguments(inner.args) + + +def _without_leading_self(arguments: ast.arguments) -> ast.arguments: + """Drop `self` from positional args for use in subclass method signatures (posonly unused here).""" + + if arguments.posonlyargs: + raise RuntimeError("positional-only parameters are not supported for browser binding 
generation") + args = list(arguments.args) + defaults = list(arguments.defaults or []) + if not args or args[0].arg != "self": + raise RuntimeError("expected leading self parameter") + new_args = args[1:] + n_old = len(args) + n_new = len(new_args) + nd = len(defaults) + if nd: + if nd > n_old: + raise RuntimeError("too many defaults") + new_defaults = defaults[-min(nd, n_new) :] if n_new else [] + else: + new_defaults = [] + return ast.arguments( + posonlyargs=[], + args=new_args, + kwonlyargs=list(arguments.kwonlyargs), + kw_defaults=list(arguments.kw_defaults or []), + defaults=new_defaults, + vararg=arguments.vararg, + kwarg=arguments.kwarg, + ) + + +def _emit_call_forward(inner_name: str, inner: ast.FunctionDef | ast.AsyncFunctionDef) -> str: + binding = _find_id_binding(inner.args) + pos_all = list(inner.args.posonlyargs) + list(inner.args.args) + if not pos_all or pos_all[0].arg != "self": + raise RuntimeError(f"expected self first on {inner_name}") + rest_pos = pos_all[1:] + + pos_call: list[str] = [] + for a in rest_pos: + if a.arg == "id": + pos_call.append("self._session_id") + else: + pos_call.append(a.arg) + + kw_parts: list[str] = [] + for a in inner.args.kwonlyargs: + if a.arg == "id": + kw_parts.append("id=self._session_id") + else: + kw_parts.append(f"{a.arg}={a.arg}") + + if inner.args.vararg is not None or inner.args.kwarg is not None: + raise RuntimeError(f"unsupported vararg/kwarg on {inner_name}") + + if binding is None: + inner_pos = ", ".join(a.arg for a in rest_pos) + inner_kw = ", ".join(f"{a.arg}={a.arg}" for a in inner.args.kwonlyargs) + bits = [inner_pos] if inner_pos else [] + if inner_kw: + bits.append(inner_kw) + return f"self._inner.{inner_name}({', '.join(bits)})" + + return f"self._inner.{inner_name}({', '.join([*pos_call, *kw_parts])})" + + +def _emit_method( + inner: ast.FunctionDef | ast.AsyncFunctionDef, + *, + is_async: bool, +) -> str | None: + if inner.name.startswith("_"): + return None + if 
_has_cached_property_decorator(inner): + return None + + binding = _find_id_binding(inner.args) + if binding is None: + return None + + pub_args = _without_leading_self(_public_signature(inner)) + ret = inner.returns + ret_s = "" if ret is None else f" -> {ast.unparse(ret)}" + prefix = "async def" if is_async else "def" + await_kw = "await " if is_async else "" + body = f"return {await_kw}{_emit_call_forward(inner.name, inner)}" + + args_s = ast.unparse(pub_args) + if args_s.startswith("(") and args_s.endswith(")"): + inner_args = args_s[1:-1].strip() + else: + inner_args = args_s.strip() + if inner_args: + sig_inner = f"self, {inner_args}" + else: + sig_inner = "self" + + lines = [f" {prefix} {inner.name}({sig_inner}){ret_s}:", f" {body}"] + return "\n".join(lines) + + +def _bound_class_name(sync_cls: str) -> str: + return f"Bound{sync_cls}" + + +def _import_line_for_class(file_path: Path, class_name: str) -> str: + rel = file_path.relative_to(_repo_root() / "src/kernel") + mod = ".".join(rel.with_suffix("").parts) + return f"from ...{mod} import {class_name}" + + +def _discover_nested_subresources(sync_class: ast.ClassDef) -> list[tuple[str, str]]: + out: list[tuple[str, str]] = [] + for node in sync_class.body: + if not isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + continue + if not _has_cached_property_decorator(node): + continue + if node.name.startswith("with_"): + continue + root = _annotation_root_name(node.returns) + if root is None: + continue + if not root.endswith("Resource") or root.startswith("Async"): + continue + if "With" in root: + continue + out.append((node.name, root)) + return out + + +def _collect_sync_resource_classes(tree: ast.Module) -> dict[str, ast.ClassDef]: + out: dict[str, ast.ClassDef] = {} + for node in tree.body: + if isinstance(node, ast.ClassDef) and _is_resource_class(node): + out[node.name] = node + return out + + +def _collect_async_resource_classes(tree: ast.Module) -> dict[str, ast.ClassDef]: + out: dict[str, 
ast.ClassDef] = {} + for node in tree.body: + if isinstance(node, ast.ClassDef) and _is_async_resource_class(node): + out[node.name] = node + return out + + +def _emit_bound_class_pair( + sync_name: str, + sync_cls: ast.ClassDef, + async_cls: ast.ClassDef | None, + nested: dict[str, list[tuple[str, str]]], +) -> str: + bound = _bound_class_name(sync_name) + lines: list[str] = [ + f"class {bound}(ScopedResourceProxy):", + ' """Session id is injected for browser API methods."""', + ] + + for prop_name, inner_cls in nested.get(sync_name, []): + ib = _bound_class_name(inner_cls) + imp = _import_line_for_class(_class_file(inner_cls), inner_cls) + lines.append(" @cached_property") + lines.append(f" def {prop_name}(self) -> {ib}:") + lines.append(f" {imp}") + lines.append(f" return {ib}({inner_cls}(self._inner._client), self._session_id)") + lines.append("") + + for node in sync_cls.body: + if isinstance(node, ast.FunctionDef) and not node.name.startswith("_"): + chunk = _emit_method(node, is_async=False) + if chunk: + lines.append(chunk) + lines.append("") + + if async_cls is not None: + an = _async_resource_name(sync_name) + bound_a = _bound_class_name(an) + lines.append("") + lines.append(f"class {bound_a}(ScopedResourceProxy):") + lines.append(' """Async variant: session id is injected for browser API methods."""') + + for prop_name, inner_cls in nested.get(sync_name, []): + ainner = _async_resource_name(inner_cls) + ib = _bound_class_name(ainner) + imp = _import_line_for_class(_class_file(inner_cls), ainner) + lines.append(" @cached_property") + lines.append(f" def {prop_name}(self) -> {ib}:") + lines.append(f" {imp}") + lines.append(f" return {ib}({ainner}(self._inner._client), self._session_id)") + lines.append("") + + for node in async_cls.body: + if isinstance(node, ast.AsyncFunctionDef) and not node.name.startswith("_"): + chunk = _emit_method(node, is_async=True) + if chunk: + lines.append(chunk) + lines.append("") + + return "\n".join(lines).rstrip() + "\n" + 
+ +_class_file_cache: dict[str, Path] = {} + + +def _index_classes_by_name() -> None: + global _class_file_cache + _class_file_cache = {} + for path in _iter_browser_py_files(): + tree = ast.parse(path.read_text(encoding="utf-8")) + for name in _collect_sync_resource_classes(tree): + _class_file_cache[name] = path + for name in _collect_async_resource_classes(tree): + _class_file_cache[name] = path + + +def _class_file(class_name: str) -> Path: + return _class_file_cache[class_name] + + +def _nested_map() -> dict[str, list[tuple[str, str]]]: + nested: dict[str, list[tuple[str, str]]] = {} + for path in _iter_browser_py_files(): + tree = ast.parse(path.read_text(encoding="utf-8")) + for name, cls in _collect_sync_resource_classes(tree).items(): + pairs = _discover_nested_subresources(cls) + if pairs: + nested[name] = pairs + return nested + + +def _browsers_py_path() -> Path: + return _browsers_root() / "browsers.py" + + +def _cached_property_resource_subresources(cls: ast.ClassDef) -> dict[str, str]: + """prop_name -> sync Resource class name for @cached_property -> XResource style members.""" + + out: dict[str, str] = {} + for node in cls.body: + if not isinstance(node, ast.FunctionDef): + continue + if not _has_cached_property_decorator(node): + continue + if node.name.startswith("with_"): + continue + root = _annotation_root_name(node.returns) + if root is None: + continue + if "With" in root or not root.endswith("Resource"): + continue + if root.startswith("Async"): + continue + out[node.name] = root + return out + + +def _facade_entries_from_browsers_py() -> list[tuple[str, str]]: + """Top-level browser subresources from `BrowsersResource` / `AsyncBrowsersResource` (AST).""" + + path = _browsers_py_path() + tree = ast.parse(path.read_text(encoding="utf-8")) + sync_cls: ast.ClassDef | None = None + async_cls: ast.ClassDef | None = None + for node in tree.body: + if isinstance(node, ast.ClassDef) and node.name == "BrowsersResource": + sync_cls = node + elif 
isinstance(node, ast.ClassDef) and node.name == "AsyncBrowsersResource": + async_cls = node + if sync_cls is None or async_cls is None: + raise RuntimeError(f"expected BrowsersResource and AsyncBrowsersResource in {path}") + + sync_map = _cached_property_resource_subresources(sync_cls) + async_map: dict[str, str] = {} + for node in async_cls.body: + if not isinstance(node, ast.FunctionDef): + continue + if not _has_cached_property_decorator(node): + continue + if node.name.startswith("with_"): + continue + root = _annotation_root_name(node.returns) + if root is None or "With" in root: + continue + if not (root.startswith("Async") and root.endswith("Resource")): + continue + async_map[node.name] = root + + if set(sync_map) != set(async_map): + raise RuntimeError( + "BrowsersResource vs AsyncBrowsersResource cached_property session resources mismatch: " + f"sync={sorted(sync_map)!r} async={sorted(async_map)!r}" + ) + + for prop in sorted(sync_map): + expected = _async_resource_name(sync_map[prop]) + got = async_map[prop] + if got != expected: + raise RuntimeError(f"{path}: property {prop!r}: expected async return {expected!r}, got {got!r}") + + return sorted(sync_map.items(), key=lambda t: t[0]) + + +def _emit_facade_mixins(entries: list[tuple[str, str]]) -> str: + lines: list[str] = [ + "class BrowserScopedFacadeMixin:", + ' """Top-level browser session subresources (sync); uses `_http` and `session_id`."""', + "", + " _http: Any", + " session_id: str", + "", + ] + for prop, sync_cls in entries: + bound = _bound_class_name(sync_cls) + imp = _import_line_for_class(_class_file(sync_cls), sync_cls) + lines.append(" @cached_property") + lines.append(f" def {prop}(self) -> {bound}:") + lines.append(f" {imp}") + lines.append(f" return {bound}({sync_cls}(self._http), self.session_id)") + lines.append("") + + lines.extend( + [ + "", + "class AsyncBrowserScopedFacadeMixin:", + ' """Top-level browser session subresources (async); uses `_http` and `session_id`."""', + "", + " 
_http: Any", + " session_id: str", + "", + ] + ) + for prop, sync_cls in entries: + async_cls = _async_resource_name(sync_cls) + bound = _bound_class_name(async_cls) + imp = _import_line_for_class(_class_file(sync_cls), async_cls) + lines.append(" @cached_property") + lines.append(f" def {prop}(self) -> {bound}:") + lines.append(f" {imp}") + lines.append(f" return {bound}({async_cls}(self._http), self.session_id)") + lines.append("") + + return "\n".join(lines).rstrip() + "\n" + + +def _generation_order(all_sync: list[str], nested: dict[str, list[tuple[str, str]]]) -> list[str]: + deps: dict[str, set[str]] = {c: set() for c in all_sync} + for parent, pairs in nested.items(): + for _, inner in pairs: + deps.setdefault(parent, set()).add(inner) + + ordered: list[str] = [] + remaining = set(all_sync) + while remaining: + ready = sorted([c for c in remaining if not (deps.get(c, set()) & remaining)]) + if not ready: + raise RuntimeError(f"cycle in nested resources: {remaining}") + for c in ready: + ordered.append(c) + remaining.remove(c) + return ordered + + +def _path_to_module(path: Path) -> str: + src = _repo_root() / "src" + rel = path.resolve().relative_to(src) + return ".".join(rel.with_suffix("").parts) + + +def _import_from_to_absolute(module_file: Path, imp: ast.ImportFrom) -> ast.ImportFrom: + level = imp.level or 0 + if level == 0: + return imp + cur = _path_to_module(module_file) + pkg = ".".join(cur.split(".")[:-1]) + if level > 1: + pkg_parts = pkg.split(".") + up = level - 1 + if len(pkg_parts) < up: + raise ValueError(f"cannot resolve import {ast.dump(imp)} from {module_file}") + pkg = ".".join(pkg_parts[:-up]) + if imp.module: + base = f"{pkg}.{imp.module}" + else: + base = pkg + return ast.ImportFrom(module=base, names=imp.names, level=0) + + +def _imports_from_resource_modules(paths: Iterable[Path]) -> list[str]: + """Collect imports from resource modules, rewritten as absolute `kernel.*` paths.""" + + def skip_line(line: str) -> bool: + if "from 
__future__ import annotations" in line: + return True + if "kernel._resource import" in line: + return True + if "kernel._utils import" in line: + return True + if "kernel._base_client import" in line: + return True + if "kernel._compat import cached_property" in line: + return True + return False + + seen: set[str] = set() + lines: list[str] = [] + for path in sorted({p.resolve() for p in paths}): + tree = ast.parse(path.read_text(encoding="utf-8")) + for node in tree.body: + if isinstance(node, ast.ImportFrom): + node = _import_from_to_absolute(path, node) + line = ast.unparse(node) + elif isinstance(node, ast.Import): + line = ast.unparse(node) + else: + continue + if skip_line(line): + continue + if line not in seen: + seen.add(line) + lines.append(line) + return lines + + +def _emit_module() -> str: + _index_classes_by_name() + nested = _nested_map() + all_sync = sorted(n for n in _class_file_cache if not n.startswith("Async")) + order = _generation_order(all_sync, nested) + + resource_paths = {_class_file_cache[name] for name in all_sync} + import_lines = _imports_from_resource_modules(resource_paths) + + parts: list[str] = [ + "# Code generated by scripts/generate_browser_scoped.py. 
DO NOT EDIT.", + "# ruff: noqa: I001, F401", + "# pyright: reportUnusedImport=false", + "# mypy: ignore-errors", + '"""Browser-scoped wrappers over generated `resources.browsers` classes (AST-driven)."""', + "", + "from __future__ import annotations", + "", + "from typing import Any", + "", + "from ..._compat import cached_property", + "from .util import ScopedResourceProxy", + ] + if import_lines: + parts.append("") + parts.extend(import_lines) + parts.append("") + + for sync_name in order: + path = _class_file_cache[sync_name] + tree = ast.parse(path.read_text(encoding="utf-8")) + sync_cls = _collect_sync_resource_classes(tree)[sync_name] + async_name = _async_resource_name(sync_name) + async_cls = _collect_async_resource_classes(tree).get(async_name) + parts.append(_emit_bound_class_pair(sync_name, sync_cls, async_cls, nested)) + parts.append("") + + facade_entries = _facade_entries_from_browsers_py() + for _prop, facade_sync_name in facade_entries: + if facade_sync_name not in _class_file_cache: + raise RuntimeError(f"facade references unknown resource class {facade_sync_name!r}") + parts.append(_emit_facade_mixins(facade_entries)) + parts.append("") + + export_names: list[str] = [] + for sync_name in sorted(all_sync): + export_names.append(_bound_class_name(sync_name)) + an = _async_resource_name(sync_name) + if an in _class_file_cache and an != sync_name: + export_names.append(_bound_class_name(an)) + + parts.append("__all__ = [") + for n in sorted(set(export_names)): + parts.append(f' "{n}",') + parts.append("]") + parts.append("") + return "\n".join(parts) + + +def main() -> int: + out = _repo_root() / "src/kernel/lib/browser_scoped/generated_bindings.py" + text = _emit_module() + out.write_text(text, encoding="utf-8") + print(f"Wrote {out} ({len(text.splitlines())} lines)") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/lint b/scripts/lint index 7675e607..693344a5 100755 --- a/scripts/lint +++ b/scripts/lint @@ 
-4,6 +4,13 @@ set -e cd "$(dirname "$0")/.." +echo "==> Regenerating browser-scoped bindings" +python3 scripts/generate_browser_scoped.py +rye run ruff format src/kernel/lib/browser_scoped/generated_bindings.py + +echo "==> Verifying generated browser-scoped bindings are committed" +git diff --exit-code -- src/kernel/lib/browser_scoped/generated_bindings.py + if [ "$1" = "--fix" ]; then echo "==> Running lints with --fix" rye run fix:ruff diff --git a/src/kernel/_client.py b/src/kernel/_client.py index 75fe4b64..ad0adfad 100644 --- a/src/kernel/_client.py +++ b/src/kernel/_client.py @@ -319,6 +319,12 @@ def copy( # client.with_options(timeout=10).foo.create(...) with_options = copy + def for_browser(self, browser: Any) -> Any: + """Return a browser-scoped client for session subresources and raw HTTP through the session base_url.""" + from .lib.browser_scoped.client import browser_scoped_from_browser + + return browser_scoped_from_browser(self, browser) + @override def _make_status_error( self, @@ -596,6 +602,12 @@ def copy( # client.with_options(timeout=10).foo.create(...) 
with_options = copy + def for_browser(self, browser: Any) -> Any: + """Return a browser-scoped client for session subresources and raw HTTP through the session base_url.""" + from .lib.browser_scoped.client import async_browser_scoped_from_browser + + return async_browser_scoped_from_browser(self, browser) + @override def _make_status_error( self, diff --git a/src/kernel/lib/browser_scoped/__init__.py b/src/kernel/lib/browser_scoped/__init__.py new file mode 100644 index 00000000..10e15438 --- /dev/null +++ b/src/kernel/lib/browser_scoped/__init__.py @@ -0,0 +1,3 @@ +from .client import BrowserScopedClient, AsyncBrowserScopedClient + +__all__ = ["BrowserScopedClient", "AsyncBrowserScopedClient"] diff --git a/src/kernel/lib/browser_scoped/browser_session_kernel.py b/src/kernel/lib/browser_scoped/browser_session_kernel.py new file mode 100644 index 00000000..55e25d76 --- /dev/null +++ b/src/kernel/lib/browser_scoped/browser_session_kernel.py @@ -0,0 +1,101 @@ +"""Internal Kernel clones for browser session HTTP (base_url + /browser/kernel paths).""" + +from __future__ import annotations + +from typing import Any, Mapping, cast +from typing_extensions import override + +from ..._client import Kernel, AsyncKernel +from ..._compat import model_copy +from ..._models import FinalRequestOptions + + +class _BrowserSessionKernel(Kernel): + """Kernel clone whose HTTP base is the browser session; strips /browsers/{id} from paths.""" + + _scoped_session_id: str + + def __init__(self, *, browser_session_id: str, **kwargs: Any) -> None: + self._scoped_session_id = browser_session_id + super().__init__(**kwargs) + + @override + def _prepare_options(self, options: FinalRequestOptions) -> FinalRequestOptions: + options = super()._prepare_options(options) + url = options.url + prefix = f"/browsers/{self._scoped_session_id}/" + if not url.startswith(prefix): + return options + suffix = url[len(prefix) :].lstrip("/") + new_url = f"/{suffix}" if suffix else "/" + out = 
model_copy(options) + out.url = new_url + return out + + +class _BrowserSessionAsyncKernel(AsyncKernel): + _scoped_session_id: str + + def __init__(self, *, browser_session_id: str, **kwargs: Any) -> None: + self._scoped_session_id = browser_session_id + super().__init__(**kwargs) + + @override + async def _prepare_options(self, options: FinalRequestOptions) -> FinalRequestOptions: + options = await super()._prepare_options(options) + url = options.url + prefix = f"/browsers/{self._scoped_session_id}/" + if not url.startswith(prefix): + return options + suffix = url[len(prefix) :].lstrip("/") + new_url = f"/{suffix}" if suffix else "/" + out = model_copy(options) + out.url = new_url + return out + + +def build_browser_session_kernel( + parent: Kernel, *, session_id: str, session_base_url: str, jwt: str +) -> _BrowserSessionKernel: + """Build a sync client sharing the parent's httpx transport; requests use session_base_url.""" + base_q_raw = getattr(parent, "_custom_query", None) + if isinstance(base_q_raw, Mapping): + base_q = {str(k): v for k, v in cast(Mapping[str, object], base_q_raw).items()} + else: + base_q = {} + dq = dict(base_q) + dq["jwt"] = jwt + return _BrowserSessionKernel( + browser_session_id=session_id, + api_key=parent.api_key, + base_url=session_base_url, + timeout=parent.timeout, + max_retries=parent.max_retries, + http_client=parent._client, + default_headers=dict(parent._custom_headers), + default_query=dq, + _strict_response_validation=getattr(parent, "_strict_response_validation", False), + ) + + +def build_async_browser_session_kernel( + parent: AsyncKernel, *, session_id: str, session_base_url: str, jwt: str +) -> _BrowserSessionAsyncKernel: + base_q_raw = getattr(parent, "_custom_query", None) + if isinstance(base_q_raw, Mapping): + base_q = {str(k): v for k, v in cast(Mapping[str, object], base_q_raw).items()} + else: + base_q = {} + dq = dict(base_q) + dq["jwt"] = jwt + return _BrowserSessionAsyncKernel( + browser_session_id=session_id, 
+ api_key=parent.api_key, + base_url=session_base_url, + timeout=parent.timeout, + max_retries=parent.max_retries, + http_client=parent._client, + default_headers=dict(parent._custom_headers), + default_query=dq, + _strict_response_validation=getattr(parent, "_strict_response_validation", False), + ) diff --git a/src/kernel/lib/browser_scoped/client.py b/src/kernel/lib/browser_scoped/client.py new file mode 100644 index 00000000..9575bbcb --- /dev/null +++ b/src/kernel/lib/browser_scoped/client.py @@ -0,0 +1,216 @@ +"""Browser-scoped view over a session: VM subresources and raw HTTP via internal /curl/raw.""" + +from __future__ import annotations + +from typing import IO, TYPE_CHECKING, Any, Mapping, cast +from contextlib import contextmanager, asynccontextmanager +from collections.abc import Iterable, Iterator, AsyncIterator + +import httpx + +from .util import ( + jwt_from_cdp_ws_url, + sanitize_curl_raw_params, + base_url_from_browser_like, + cdp_ws_url_from_browser_like, + session_id_from_browser_like, +) +from ..._types import Body, Timeout, NotGiven, BinaryTypes, not_given +from ..._models import FinalRequestOptions +from .generated_bindings import BrowserScopedFacadeMixin, AsyncBrowserScopedFacadeMixin +from .browser_session_kernel import build_browser_session_kernel, build_async_browser_session_kernel + +if TYPE_CHECKING: + from ..._client import Kernel, AsyncKernel + + +class BrowserScopedClient(BrowserScopedFacadeMixin): + """Session-scoped API: subresources without repeating session id; HTTP via browser /curl/raw.""" + + def __init__(self, parent: Kernel, *, session_id: str, session_base_url: str, jwt: str) -> None: + self._parent = parent + self.session_id = session_id + self._session_base_url = session_base_url + self._jwt = jwt + self._http = build_browser_session_kernel( + parent, session_id=session_id, session_base_url=session_base_url, jwt=jwt + ) + + @property + def parent(self) -> Kernel: + """Control-plane client this view was created from (for 
future id remapping hooks).""" + return self._parent + + @property + def base_url(self) -> str: + return self._session_base_url + + def request( + self, + method: str, + url: str, + *, + content: BinaryTypes | None = None, + json: Body | None = None, + headers: Mapping[str, str] | None = None, + params: Mapping[str, object] | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + ) -> httpx.Response: + if json is not None and content is not None: + raise TypeError("Passing both `json` and `content` is not supported") + q: dict[str, object] = {**sanitize_curl_raw_params(params), "url": url} + opts = FinalRequestOptions.construct( + method=method.upper(), + url="/curl/raw", + params=q, + headers=_normalize_headers(headers), + content=_normalize_binary_content(content), + json_data=json, + timeout=_normalize_timeout(timeout), + ) + return cast(httpx.Response, self._http.request(httpx.Response, opts)) + + @contextmanager + def stream( + self, + method: str, + url: str, + *, + content: BinaryTypes | None = None, + headers: Mapping[str, str] | None = None, + params: Mapping[str, object] | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + ) -> Iterator[httpx.Response]: + q: dict[str, Any] = dict(self._http.default_query) + q.update(sanitize_curl_raw_params(params)) + q["url"] = url + h = {k: v for k, v in self._http.default_headers.items() if isinstance(v, str)} + if content is None: + h.pop("Content-Type", None) + if headers: + h.update(headers) + eff_timeout = self._http.timeout if isinstance(timeout, NotGiven) else timeout + cm = self._http._client.stream( + method.upper(), + self._http._prepare_url("/curl/raw"), + params=q, + headers=h, + content=_normalize_binary_content(content), + timeout=_normalize_timeout(eff_timeout), + ) + with cm as resp: + yield resp + + +class AsyncBrowserScopedClient(AsyncBrowserScopedFacadeMixin): + def __init__(self, parent: AsyncKernel, *, session_id: str, session_base_url: str, jwt: str) -> None: 
+ self._parent = parent + self.session_id = session_id + self._session_base_url = session_base_url + self._jwt = jwt + self._http = build_async_browser_session_kernel( + parent, session_id=session_id, session_base_url=session_base_url, jwt=jwt + ) + + @property + def parent(self) -> AsyncKernel: + return self._parent + + @property + def base_url(self) -> str: + return self._session_base_url + + async def request( + self, + method: str, + url: str, + *, + content: BinaryTypes | None = None, + json: Body | None = None, + headers: Mapping[str, str] | None = None, + params: Mapping[str, object] | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + ) -> httpx.Response: + if json is not None and content is not None: + raise TypeError("Passing both `json` and `content` is not supported") + q: dict[str, object] = {**sanitize_curl_raw_params(params), "url": url} + opts = FinalRequestOptions.construct( + method=method.upper(), + url="/curl/raw", + params=q, + headers=_normalize_headers(headers), + content=_normalize_binary_content(content), + json_data=json, + timeout=_normalize_timeout(timeout), + ) + return cast(httpx.Response, await self._http.request(httpx.Response, opts)) + + @asynccontextmanager + async def stream( + self, + method: str, + url: str, + *, + content: BinaryTypes | None = None, + headers: Mapping[str, str] | None = None, + params: Mapping[str, object] | None = None, + timeout: float | Timeout | None | NotGiven = not_given, + ) -> AsyncIterator[httpx.Response]: + q: dict[str, Any] = dict(self._http.default_query) + q.update(sanitize_curl_raw_params(params)) + q["url"] = url + h = {k: v for k, v in self._http.default_headers.items() if isinstance(v, str)} + if content is None: + h.pop("Content-Type", None) + if headers: + h.update(headers) + eff_timeout = self._http.timeout if isinstance(timeout, NotGiven) else timeout + async with self._http._client.stream( + method.upper(), + self._http._prepare_url("/curl/raw"), + params=q, + 
headers=h, + content=_normalize_binary_content(content), + timeout=_normalize_timeout(eff_timeout), + ) as resp: + yield resp + + +def browser_scoped_from_browser(parent: Kernel, browser: Any) -> BrowserScopedClient: + session_id = session_id_from_browser_like(browser) + session_base = base_url_from_browser_like(browser) + if not session_base: + raise ValueError("browser.base_url is required for a browser-scoped client") + jwt = jwt_from_cdp_ws_url(cdp_ws_url_from_browser_like(browser)) + if not jwt: + raise ValueError("could not parse jwt from browser.cdp_ws_url; required for browser session HTTP") + return BrowserScopedClient(parent, session_id=session_id, session_base_url=session_base, jwt=jwt) + + +def async_browser_scoped_from_browser(parent: AsyncKernel, browser: Any) -> AsyncBrowserScopedClient: + session_id = session_id_from_browser_like(browser) + session_base = base_url_from_browser_like(browser) + if not session_base: + raise ValueError("browser.base_url is required for a browser-scoped client") + jwt = jwt_from_cdp_ws_url(cdp_ws_url_from_browser_like(browser)) + if not jwt: + raise ValueError("could not parse jwt from browser.cdp_ws_url; required for browser session HTTP") + return AsyncBrowserScopedClient(parent, session_id=session_id, session_base_url=session_base, jwt=jwt) + + +def _normalize_headers(headers: Mapping[str, str] | None) -> Mapping[str, str]: + return headers if headers is not None else {} + + +def _normalize_timeout(timeout: float | Timeout | None | NotGiven) -> float | Timeout | None: + return None if isinstance(timeout, NotGiven) else timeout + + +def _normalize_binary_content(content: BinaryTypes | None) -> bytes | IO[bytes] | Iterable[bytes] | None: + if content is None: + return None + if isinstance(content, bytearray): + return bytes(content) + if isinstance(content, memoryview): + return content.tobytes() + return content diff --git a/src/kernel/lib/browser_scoped/generated_bindings.py 
b/src/kernel/lib/browser_scoped/generated_bindings.py new file mode 100644 index 00000000..1a9524c2 --- /dev/null +++ b/src/kernel/lib/browser_scoped/generated_bindings.py @@ -0,0 +1,1946 @@ +# Code generated by scripts/generate_browser_scoped.py. DO NOT EDIT. +# ruff: noqa: I001, F401 +# pyright: reportUnusedImport=false +# mypy: ignore-errors +"""Browser-scoped wrappers over generated `resources.browsers` classes (AST-driven).""" + +from __future__ import annotations + +from typing import Any + +from ..._compat import cached_property +from .util import ScopedResourceProxy + +from typing import Iterable +from typing_extensions import Literal +import httpx +from kernel._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given +from kernel._response import ( + BinaryAPIResponse, + AsyncBinaryAPIResponse, + StreamedBinaryAPIResponse, + AsyncStreamedBinaryAPIResponse, + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + to_custom_raw_response_wrapper, + async_to_streamed_response_wrapper, + to_custom_streamed_response_wrapper, + async_to_custom_raw_response_wrapper, + async_to_custom_streamed_response_wrapper, +) +from kernel.types.browsers import ( + computer_batch_params, + computer_scroll_params, + computer_press_key_params, + computer_type_text_params, + computer_drag_mouse_params, + computer_move_mouse_params, + computer_click_mouse_params, + computer_write_clipboard_params, + computer_capture_screenshot_params, + computer_set_cursor_visibility_params, +) +from kernel.types.browsers.computer_read_clipboard_response import ComputerReadClipboardResponse +from kernel.types.browsers.computer_get_mouse_position_response import ComputerGetMousePositionResponse +from kernel.types.browsers.computer_set_cursor_visibility_response import ComputerSetCursorVisibilityResponse +import os +from typing import Mapping, Iterable, cast +from kernel.resources.browsers.fs.watch import ( + WatchResource, + 
AsyncWatchResource, + WatchResourceWithRawResponse, + AsyncWatchResourceWithRawResponse, + WatchResourceWithStreamingResponse, + AsyncWatchResourceWithStreamingResponse, +) +from kernel._files import read_file_content, async_read_file_content +from kernel._types import ( + Body, + Omit, + Query, + Headers, + NoneType, + NotGiven, + FileTypes, + BinaryTypes, + FileContent, + AsyncBinaryTypes, + omit, + not_given, +) +from kernel.types.browsers import ( + f_move_params, + f_upload_params, + f_file_info_params, + f_read_file_params, + f_list_files_params, + f_upload_zip_params, + f_write_file_params, + f_delete_file_params, + f_create_directory_params, + f_delete_directory_params, + f_download_dir_zip_params, + f_set_file_permissions_params, +) +from kernel.types.browsers.f_file_info_response import FFileInfoResponse +from kernel.types.browsers.f_list_files_response import FListFilesResponse +from kernel._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given +from kernel._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from kernel._streaming import Stream, AsyncStream +from kernel.types.browsers.fs import watch_start_params +from kernel.types.browsers.fs.watch_start_response import WatchStartResponse +from kernel.types.browsers.fs.watch_events_response import WatchEventsResponse +from kernel._types import Body, Omit, Query, Headers, NotGiven, omit, not_given +from kernel.types.browsers import log_stream_params +from kernel.types.shared.log_event import LogEvent +from kernel.types.browsers import playwright_execute_params +from kernel.types.browsers.playwright_execute_response import PlaywrightExecuteResponse +from typing import Dict, Optional +from kernel._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given +from kernel.types.browsers import ( + process_exec_params, + process_kill_params, + process_spawn_params, 
+ process_stdin_params, + process_resize_params, +) +from kernel.types.browsers.process_exec_response import ProcessExecResponse +from kernel.types.browsers.process_kill_response import ProcessKillResponse +from kernel.types.browsers.process_spawn_response import ProcessSpawnResponse +from kernel.types.browsers.process_stdin_response import ProcessStdinResponse +from kernel.types.browsers.process_resize_response import ProcessResizeResponse +from kernel.types.browsers.process_status_response import ProcessStatusResponse +from kernel.types.browsers.process_stdout_stream_response import ProcessStdoutStreamResponse +from kernel.types.browsers import replay_start_params +from kernel.types.browsers.replay_list_response import ReplayListResponse +from kernel.types.browsers.replay_start_response import ReplayStartResponse + + +class BoundComputerResource(ScopedResourceProxy): + """Session id is injected for browser API methods.""" + + def batch( + self, + *, + actions: Iterable[computer_batch_params.Action], + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.batch( + self._session_id, + actions=actions, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def capture_screenshot( + self, + *, + region: computer_capture_screenshot_params.Region | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + return self._inner.capture_screenshot( + self._session_id, + region=region, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def click_mouse( + self, + *, + x: int, + y: int, + button: Literal["left", "right", "middle", "back", "forward"] | Omit = 
omit, + click_type: Literal["down", "up", "click"] | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + num_clicks: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.click_mouse( + self._session_id, + x=x, + y=y, + button=button, + click_type=click_type, + hold_keys=hold_keys, + num_clicks=num_clicks, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def drag_mouse( + self, + *, + path: Iterable[Iterable[int]], + button: Literal["left", "middle", "right"] | Omit = omit, + delay: int | Omit = omit, + duration_ms: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + smooth: bool | Omit = omit, + step_delay_ms: int | Omit = omit, + steps_per_segment: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.drag_mouse( + self._session_id, + path=path, + button=button, + delay=delay, + duration_ms=duration_ms, + hold_keys=hold_keys, + smooth=smooth, + step_delay_ms=step_delay_ms, + steps_per_segment=steps_per_segment, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def get_mouse_position( + self, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerGetMousePositionResponse: + return self._inner.get_mouse_position( + self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def move_mouse( + self, + *, + x: int, + y: int, + duration_ms: int | Omit = omit, + hold_keys: 
SequenceNotStr[str] | Omit = omit, + smooth: bool | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.move_mouse( + self._session_id, + x=x, + y=y, + duration_ms=duration_ms, + hold_keys=hold_keys, + smooth=smooth, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def press_key( + self, + *, + keys: SequenceNotStr[str], + duration: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.press_key( + self._session_id, + keys=keys, + duration=duration, + hold_keys=hold_keys, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def read_clipboard( + self, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerReadClipboardResponse: + return self._inner.read_clipboard( + self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def scroll( + self, + *, + x: int, + y: int, + delta_x: int | Omit = omit, + delta_y: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.scroll( + self._session_id, + x=x, + y=y, + delta_x=delta_x, + delta_y=delta_y, + hold_keys=hold_keys, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + 
timeout=timeout, + ) + + def set_cursor_visibility( + self, + *, + hidden: bool, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerSetCursorVisibilityResponse: + return self._inner.set_cursor_visibility( + self._session_id, + hidden=hidden, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def type_text( + self, + *, + text: str, + delay: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.type_text( + self._session_id, + text=text, + delay=delay, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def write_clipboard( + self, + *, + text: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.write_clipboard( + self._session_id, + text=text, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncComputerResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + async def batch( + self, + *, + actions: Iterable[computer_batch_params.Action], + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.batch( + self._session_id, + actions=actions, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def capture_screenshot( + self, + *, + region: 
computer_capture_screenshot_params.Region | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + return await self._inner.capture_screenshot( + self._session_id, + region=region, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def click_mouse( + self, + *, + x: int, + y: int, + button: Literal["left", "right", "middle", "back", "forward"] | Omit = omit, + click_type: Literal["down", "up", "click"] | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + num_clicks: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.click_mouse( + self._session_id, + x=x, + y=y, + button=button, + click_type=click_type, + hold_keys=hold_keys, + num_clicks=num_clicks, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def drag_mouse( + self, + *, + path: Iterable[Iterable[int]], + button: Literal["left", "middle", "right"] | Omit = omit, + delay: int | Omit = omit, + duration_ms: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + smooth: bool | Omit = omit, + step_delay_ms: int | Omit = omit, + steps_per_segment: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.drag_mouse( + self._session_id, + path=path, + button=button, + delay=delay, + duration_ms=duration_ms, + hold_keys=hold_keys, + smooth=smooth, + step_delay_ms=step_delay_ms, + steps_per_segment=steps_per_segment, + extra_headers=extra_headers, + 
extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def get_mouse_position( + self, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerGetMousePositionResponse: + return await self._inner.get_mouse_position( + self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def move_mouse( + self, + *, + x: int, + y: int, + duration_ms: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + smooth: bool | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.move_mouse( + self._session_id, + x=x, + y=y, + duration_ms=duration_ms, + hold_keys=hold_keys, + smooth=smooth, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def press_key( + self, + *, + keys: SequenceNotStr[str], + duration: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.press_key( + self._session_id, + keys=keys, + duration=duration, + hold_keys=hold_keys, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def read_clipboard( + self, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerReadClipboardResponse: + return await self._inner.read_clipboard( + self._session_id, + 
extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def scroll( + self, + *, + x: int, + y: int, + delta_x: int | Omit = omit, + delta_y: int | Omit = omit, + hold_keys: SequenceNotStr[str] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.scroll( + self._session_id, + x=x, + y=y, + delta_x=delta_x, + delta_y=delta_y, + hold_keys=hold_keys, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def set_cursor_visibility( + self, + *, + hidden: bool, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ComputerSetCursorVisibilityResponse: + return await self._inner.set_cursor_visibility( + self._session_id, + hidden=hidden, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def type_text( + self, + *, + text: str, + delay: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.type_text( + self._session_id, + text=text, + delay=delay, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def write_clipboard( + self, + *, + text: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.write_clipboard( + self._session_id, + text=text, + extra_headers=extra_headers, + 
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )


class BoundLogsResource(ScopedResourceProxy):
    """Session id is injected for browser API methods."""

    def stream(
        self,
        *,
        source: Literal["path", "supervisor"],
        follow: bool | Omit = omit,
        path: str | Omit = omit,
        supervisor_process: str | Omit = omit,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> Stream[LogEvent]:
        """Stream log events; delegates to the inner resource with the bound session id."""
        return self._inner.stream(
            self._session_id,
            source=source,
            follow=follow,
            path=path,
            supervisor_process=supervisor_process,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )


class BoundAsyncLogsResource(ScopedResourceProxy):
    """Async variant: session id is injected for browser API methods."""

    async def stream(
        self,
        *,
        source: Literal["path", "supervisor"],
        follow: bool | Omit = omit,
        path: str | Omit = omit,
        supervisor_process: str | Omit = omit,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncStream[LogEvent]:
        """Stream log events; delegates to the inner async resource with the bound session id."""
        return await self._inner.stream(
            self._session_id,
            source=source,
            follow=follow,
            path=path,
            supervisor_process=supervisor_process,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )


class BoundPlaywrightResource(ScopedResourceProxy):
    """Session id is injected for browser API methods."""

    def execute(
        self,
        *,
        code: str,
        timeout_sec: int | Omit = omit,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> PlaywrightExecuteResponse:
        """Execute Playwright code; delegates to the inner resource with the bound session id."""
        return self._inner.execute(
            self._session_id,
code=code, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncPlaywrightResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + async def execute( + self, + *, + code: str, + timeout_sec: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> PlaywrightExecuteResponse: + return await self._inner.execute( + self._session_id, + code=code, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundProcessResource(ScopedResourceProxy): + """Session id is injected for browser API methods.""" + + def exec( + self, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessExecResponse: + return self._inner.exec( + self._session_id, + command=command, + args=args, + as_root=as_root, + as_user=as_user, + cwd=cwd, + env=env, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def kill( + self, + process_id: str, + *, + signal: Literal["TERM", "KILL", "INT", "HUP"], + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessKillResponse: + return self._inner.kill( + process_id, + id=self._session_id, + 
signal=signal, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def resize( + self, + process_id: str, + *, + cols: int, + rows: int, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessResizeResponse: + return self._inner.resize( + process_id, + id=self._session_id, + cols=cols, + rows=rows, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def spawn( + self, + *, + command: str, + allocate_tty: bool | Omit = omit, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cols: int | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + rows: int | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessSpawnResponse: + return self._inner.spawn( + self._session_id, + command=command, + allocate_tty=allocate_tty, + args=args, + as_root=as_root, + as_user=as_user, + cols=cols, + cwd=cwd, + env=env, + rows=rows, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def status( + self, + process_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStatusResponse: + return self._inner.status( + process_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def stdin( + self, + process_id: str, + *, + data_b64: str, + extra_headers: 
Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStdinResponse: + return self._inner.stdin( + process_id, + id=self._session_id, + data_b64=data_b64, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def stdout_stream( + self, + process_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[ProcessStdoutStreamResponse]: + return self._inner.stdout_stream( + process_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncProcessResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + async def exec( + self, + *, + command: str, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessExecResponse: + return await self._inner.exec( + self._session_id, + command=command, + args=args, + as_root=as_root, + as_user=as_user, + cwd=cwd, + env=env, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def kill( + self, + process_id: str, + *, + signal: Literal["TERM", "KILL", "INT", "HUP"], + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) 
-> ProcessKillResponse: + return await self._inner.kill( + process_id, + id=self._session_id, + signal=signal, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def resize( + self, + process_id: str, + *, + cols: int, + rows: int, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessResizeResponse: + return await self._inner.resize( + process_id, + id=self._session_id, + cols=cols, + rows=rows, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def spawn( + self, + *, + command: str, + allocate_tty: bool | Omit = omit, + args: SequenceNotStr[str] | Omit = omit, + as_root: bool | Omit = omit, + as_user: Optional[str] | Omit = omit, + cols: int | Omit = omit, + cwd: Optional[str] | Omit = omit, + env: Dict[str, str] | Omit = omit, + rows: int | Omit = omit, + timeout_sec: Optional[int] | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessSpawnResponse: + return await self._inner.spawn( + self._session_id, + command=command, + allocate_tty=allocate_tty, + args=args, + as_root=as_root, + as_user=as_user, + cols=cols, + cwd=cwd, + env=env, + rows=rows, + timeout_sec=timeout_sec, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def status( + self, + process_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ProcessStatusResponse: + return await self._inner.status( + process_id, + id=self._session_id, + extra_headers=extra_headers, + 
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )

    async def stdin(
        self,
        process_id: str,
        *,
        data_b64: str,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> ProcessStdinResponse:
        """Write base64-encoded stdin data to a process in the bound session."""
        return await self._inner.stdin(
            process_id,
            id=self._session_id,
            data_b64=data_b64,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )

    async def stdout_stream(
        self,
        process_id: str,
        *,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> AsyncStream[ProcessStdoutStreamResponse]:
        """Stream a process's stdout; delegates with the bound session id."""
        return await self._inner.stdout_stream(
            process_id,
            id=self._session_id,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )


class BoundReplaysResource(ScopedResourceProxy):
    """Session id is injected for browser API methods."""

    def list(
        self,
        *,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> ReplayListResponse:
        """List replays for the bound session."""
        return self._inner.list(
            self._session_id,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )

    def download(
        self,
        replay_id: str,
        *,
        extra_headers: Headers | None = None,
        extra_query: Query | None = None,
        extra_body: Body | None = None,
        timeout: float | httpx.Timeout | None | NotGiven = not_given,
    ) -> BinaryAPIResponse:
        """Download a replay recording from the bound session (binary response)."""
        return self._inner.download(
            replay_id,
            id=self._session_id,
            extra_headers=extra_headers,
            extra_query=extra_query,
            extra_body=extra_body,
            timeout=timeout,
        )

    def start(
        self,
        *,
        framerate: int | Omit = omit,
max_duration_in_seconds: int | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayStartResponse: + return self._inner.start( + self._session_id, + framerate=framerate, + max_duration_in_seconds=max_duration_in_seconds, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def stop( + self, + replay_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.stop( + replay_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncReplaysResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + async def list( + self, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayListResponse: + return await self._inner.list( + self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def download( + self, + replay_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + return await self._inner.download( + replay_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def start( + self, + *, + framerate: int | Omit = omit, + max_duration_in_seconds: int | Omit = omit, + extra_headers: Headers | None = None, + 
extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> ReplayStartResponse: + return await self._inner.start( + self._session_id, + framerate=framerate, + max_duration_in_seconds=max_duration_in_seconds, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def stop( + self, + replay_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.stop( + replay_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundWatchResource(ScopedResourceProxy): + """Session id is injected for browser API methods.""" + + def events( + self, + watch_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> Stream[WatchEventsResponse]: + return self._inner.events( + watch_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def start( + self, + *, + path: str, + recursive: bool | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> WatchStartResponse: + return self._inner.start( + self._session_id, + path=path, + recursive=recursive, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def stop( + self, + watch_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None 
| NotGiven = not_given, + ) -> None: + return self._inner.stop( + watch_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncWatchResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + async def events( + self, + watch_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncStream[WatchEventsResponse]: + return await self._inner.events( + watch_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def start( + self, + *, + path: str, + recursive: bool | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> WatchStartResponse: + return await self._inner.start( + self._session_id, + path=path, + recursive=recursive, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def stop( + self, + watch_id: str, + *, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.stop( + watch_id, + id=self._session_id, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundFsResource(ScopedResourceProxy): + """Session id is injected for browser API methods.""" + + @cached_property + def watch(self) -> BoundWatchResource: + from ...resources.browsers.fs.watch import WatchResource + + return BoundWatchResource(WatchResource(self._inner._client), self._session_id) + + def create_directory( 
+ self, + *, + path: str, + mode: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.create_directory( + self._session_id, + path=path, + mode=mode, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def delete_directory( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.delete_directory( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def delete_file( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.delete_file( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def download_dir_zip( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + return self._inner.download_dir_zip( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def file_info( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FFileInfoResponse: + return self._inner.file_info( + self._session_id, + path=path, + 
extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def list_files( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FListFilesResponse: + return self._inner.list_files( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def move( + self, + *, + dest_path: str, + src_path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.move( + self._session_id, + dest_path=dest_path, + src_path=src_path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def read_file( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> BinaryAPIResponse: + return self._inner.read_file( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def set_file_permissions( + self, + *, + mode: str, + path: str, + group: str | Omit = omit, + owner: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.set_file_permissions( + self._session_id, + mode=mode, + path=path, + group=group, + owner=owner, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def upload( + self, + *, + files: Iterable[f_upload_params.File], + 
extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.upload( + self._session_id, + files=files, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def upload_zip( + self, + *, + dest_path: str, + zip_file: FileTypes, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.upload_zip( + self._session_id, + dest_path=dest_path, + zip_file=zip_file, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + def write_file( + self, + contents: FileContent | BinaryTypes, + *, + path: str, + mode: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return self._inner.write_file( + self._session_id, + contents, + path=path, + mode=mode, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BoundAsyncFsResource(ScopedResourceProxy): + """Async variant: session id is injected for browser API methods.""" + + @cached_property + def watch(self) -> BoundAsyncWatchResource: + from ...resources.browsers.fs.watch import AsyncWatchResource + + return BoundAsyncWatchResource(AsyncWatchResource(self._inner._client), self._session_id) + + async def create_directory( + self, + *, + path: str, + mode: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.create_directory( + 
self._session_id, + path=path, + mode=mode, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def delete_directory( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.delete_directory( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def delete_file( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.delete_file( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def download_dir_zip( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + return await self._inner.download_dir_zip( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def file_info( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FFileInfoResponse: + return await self._inner.file_info( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def list_files( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = 
None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> FListFilesResponse: + return await self._inner.list_files( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def move( + self, + *, + dest_path: str, + src_path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.move( + self._session_id, + dest_path=dest_path, + src_path=src_path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def read_file( + self, + *, + path: str, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> AsyncBinaryAPIResponse: + return await self._inner.read_file( + self._session_id, + path=path, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def set_file_permissions( + self, + *, + mode: str, + path: str, + group: str | Omit = omit, + owner: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.set_file_permissions( + self._session_id, + mode=mode, + path=path, + group=group, + owner=owner, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def upload( + self, + *, + files: Iterable[f_upload_params.File], + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, 
+ ) -> None: + return await self._inner.upload( + self._session_id, + files=files, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def upload_zip( + self, + *, + dest_path: str, + zip_file: FileTypes, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.upload_zip( + self._session_id, + dest_path=dest_path, + zip_file=zip_file, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + async def write_file( + self, + contents: FileContent | AsyncBinaryTypes, + *, + path: str, + mode: str | Omit = omit, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = not_given, + ) -> None: + return await self._inner.write_file( + self._session_id, + contents, + path=path, + mode=mode, + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + ) + + +class BrowserScopedFacadeMixin: + """Top-level browser session subresources (sync); uses `_http` and `session_id`.""" + + _http: Any + session_id: str + + @cached_property + def computer(self) -> BoundComputerResource: + from ...resources.browsers.computer import ComputerResource + + return BoundComputerResource(ComputerResource(self._http), self.session_id) + + @cached_property + def fs(self) -> BoundFsResource: + from ...resources.browsers.fs.fs import FsResource + + return BoundFsResource(FsResource(self._http), self.session_id) + + @cached_property + def logs(self) -> BoundLogsResource: + from ...resources.browsers.logs import LogsResource + + return BoundLogsResource(LogsResource(self._http), self.session_id) + + @cached_property + def playwright(self) -> BoundPlaywrightResource: + from 
...resources.browsers.playwright import PlaywrightResource + + return BoundPlaywrightResource(PlaywrightResource(self._http), self.session_id) + + @cached_property + def process(self) -> BoundProcessResource: + from ...resources.browsers.process import ProcessResource + + return BoundProcessResource(ProcessResource(self._http), self.session_id) + + @cached_property + def replays(self) -> BoundReplaysResource: + from ...resources.browsers.replays import ReplaysResource + + return BoundReplaysResource(ReplaysResource(self._http), self.session_id) + + +class AsyncBrowserScopedFacadeMixin: + """Top-level browser session subresources (async); uses `_http` and `session_id`.""" + + _http: Any + session_id: str + + @cached_property + def computer(self) -> BoundAsyncComputerResource: + from ...resources.browsers.computer import AsyncComputerResource + + return BoundAsyncComputerResource(AsyncComputerResource(self._http), self.session_id) + + @cached_property + def fs(self) -> BoundAsyncFsResource: + from ...resources.browsers.fs.fs import AsyncFsResource + + return BoundAsyncFsResource(AsyncFsResource(self._http), self.session_id) + + @cached_property + def logs(self) -> BoundAsyncLogsResource: + from ...resources.browsers.logs import AsyncLogsResource + + return BoundAsyncLogsResource(AsyncLogsResource(self._http), self.session_id) + + @cached_property + def playwright(self) -> BoundAsyncPlaywrightResource: + from ...resources.browsers.playwright import AsyncPlaywrightResource + + return BoundAsyncPlaywrightResource(AsyncPlaywrightResource(self._http), self.session_id) + + @cached_property + def process(self) -> BoundAsyncProcessResource: + from ...resources.browsers.process import AsyncProcessResource + + return BoundAsyncProcessResource(AsyncProcessResource(self._http), self.session_id) + + @cached_property + def replays(self) -> BoundAsyncReplaysResource: + from ...resources.browsers.replays import AsyncReplaysResource + + return 
BoundAsyncReplaysResource(AsyncReplaysResource(self._http), self.session_id) + + +__all__ = [ + "BoundAsyncComputerResource", + "BoundAsyncFsResource", + "BoundAsyncLogsResource", + "BoundAsyncPlaywrightResource", + "BoundAsyncProcessResource", + "BoundAsyncReplaysResource", + "BoundAsyncWatchResource", + "BoundComputerResource", + "BoundFsResource", + "BoundLogsResource", + "BoundPlaywrightResource", + "BoundProcessResource", + "BoundReplaysResource", + "BoundWatchResource", +] diff --git a/src/kernel/lib/browser_scoped/util.py b/src/kernel/lib/browser_scoped/util.py new file mode 100644 index 00000000..bddb6dd1 --- /dev/null +++ b/src/kernel/lib/browser_scoped/util.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +import inspect +from typing import Any, Mapping, cast +from urllib.parse import parse_qs, urlparse + +# Query keys reserved for /curl/raw; user-supplied `params` must not override these. +CURL_RAW_RESERVED_QUERY_KEYS: frozenset[str] = frozenset({"url", "jwt"}) + + +def sanitize_curl_raw_params(params: Mapping[str, object] | None) -> dict[str, object]: + """Drop reserved keys from user params so they cannot override the target URL or auth.""" + if not params: + return {} + return {k: v for k, v in dict(params).items() if k not in CURL_RAW_RESERVED_QUERY_KEYS} + + +def jwt_from_cdp_ws_url(cdp_ws_url: str) -> str | None: + parsed = urlparse(cdp_ws_url) + values = parse_qs(parsed.query).get("jwt") + if not values: + return None + return values[0] + + +def session_id_from_browser_like(browser: Any) -> str: + sid = getattr(browser, "session_id", None) + if isinstance(sid, str) and sid: + return sid + if isinstance(browser, Mapping): + mapping = cast(Mapping[str, object], browser) + m = mapping.get("session_id") + if isinstance(m, str) and m: + return m + raise TypeError("browser object must have a non-empty session_id") + + +def base_url_from_browser_like(browser: Any) -> str | None: + bu = getattr(browser, "base_url", None) + if isinstance(bu, str) 
and bu.strip(): + return bu.strip().rstrip("/") + "/" + if isinstance(browser, Mapping): + mapping = cast(Mapping[str, object], browser) + raw = mapping.get("base_url") + if isinstance(raw, str) and raw.strip(): + return raw.strip().rstrip("/") + "/" + return None + + +def cdp_ws_url_from_browser_like(browser: Any) -> str: + u = getattr(browser, "cdp_ws_url", None) + if isinstance(u, str) and u: + return u + if isinstance(browser, Mapping): + mapping = cast(Mapping[str, object], browser) + m = mapping.get("cdp_ws_url") + if isinstance(m, str) and m: + return m + raise TypeError("browser object must have a non-empty cdp_ws_url") + + +class ScopedResourceProxy: + """Delegates to a generated resource; injects `id` for callables that still expose it.""" + + def __init__(self, inner: Any, session_id: str) -> None: + object.__setattr__(self, "_inner", inner) + object.__setattr__(self, "_session_id", session_id) + + def __getattr__(self, name: str) -> Any: + if name.startswith("_"): + raise AttributeError(name) + attr = getattr(self._inner, name) + if name.startswith("with_") or not callable(attr): + return attr + try: + sig = inspect.signature(attr) + except (TypeError, ValueError): + return attr + if "id" not in sig.parameters: + return attr + + def bound(*args: Any, **kwargs: Any) -> Any: + kw = dict(kwargs) + kw["id"] = self._session_id + return attr(*args, **kw) + + return bound diff --git a/src/kernel/types/browser_create_response.py b/src/kernel/types/browser_create_response.py index 9356bb05..a793eb2f 100644 --- a/src/kernel/types/browser_create_response.py +++ b/src/kernel/types/browser_create_response.py @@ -36,7 +36,7 @@ class BrowserCreateResponse(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live 
viewing the browser session. diff --git a/src/kernel/types/browser_list_response.py b/src/kernel/types/browser_list_response.py index f3a88f29..43e60cd1 100644 --- a/src/kernel/types/browser_list_response.py +++ b/src/kernel/types/browser_list_response.py @@ -36,7 +36,7 @@ class BrowserListResponse(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live viewing the browser session. diff --git a/src/kernel/types/browser_pool_acquire_response.py b/src/kernel/types/browser_pool_acquire_response.py index 064c405d..ea37ba65 100644 --- a/src/kernel/types/browser_pool_acquire_response.py +++ b/src/kernel/types/browser_pool_acquire_response.py @@ -36,7 +36,7 @@ class BrowserPoolAcquireResponse(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live viewing the browser session. diff --git a/src/kernel/types/browser_retrieve_response.py b/src/kernel/types/browser_retrieve_response.py index 5b5a8913..c56d159a 100644 --- a/src/kernel/types/browser_retrieve_response.py +++ b/src/kernel/types/browser_retrieve_response.py @@ -36,7 +36,7 @@ class BrowserRetrieveResponse(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live viewing the browser session. 
diff --git a/src/kernel/types/browser_update_response.py b/src/kernel/types/browser_update_response.py index 188895ad..325f8f1f 100644 --- a/src/kernel/types/browser_update_response.py +++ b/src/kernel/types/browser_update_response.py @@ -36,7 +36,7 @@ class BrowserUpdateResponse(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live viewing the browser session. diff --git a/src/kernel/types/invocation_list_browsers_response.py b/src/kernel/types/invocation_list_browsers_response.py index 23eda779..e99b5087 100644 --- a/src/kernel/types/invocation_list_browsers_response.py +++ b/src/kernel/types/invocation_list_browsers_response.py @@ -36,7 +36,7 @@ class Browser(BaseModel): """Websocket URL for WebDriver BiDi connections to the browser session""" base_url: Optional[str] = None - """Metro-API HTTP base URL for this browser session.""" + """HTTP base URL for this browser session (browser VM / session proxy).""" browser_live_view_url: Optional[str] = None """Remote URL for live viewing the browser session. 
"""Tests for the browser-scoped client facade (``client.for_browser``).

Covers: JWT extraction from the CDP websocket URL, routing of scoped
sub-resource calls to the session's ``base_url``, raw HTTP proxying via
``/curl/raw`` (including protection of the reserved ``url``/``jwt`` query
keys), streaming, and the requirement that the browser carries a ``base_url``.
"""

from __future__ import annotations

import os
import json
from typing import Any, cast

import httpx
import respx
import pytest

from kernel import Kernel
from kernel.lib.browser_scoped.util import jwt_from_cdp_ws_url

# URL of the Kernel API itself (a mock server in CI); the browser-session
# traffic exercised below goes to the fake "browser-session.test" host instead.
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
api_key = "sk-123"


def _fake_browser() -> dict[str, object]:
    """Minimal browser-session payload shaped like the browsers API response.

    The scoped client reads ``session_id``, ``base_url`` and the ``jwt``
    query parameter embedded in ``cdp_ws_url``; the remaining keys are filler.
    """
    return {
        "session_id": "sess-1",
        "base_url": "http://browser-session.test/browser/kernel",
        "cdp_ws_url": "wss://browser-session.test/browser/cdp?jwt=token-abc",
        "webdriver_ws_url": "wss://x",
        "created_at": "2020-01-01T00:00:00Z",
        "headless": True,
        "stealth": False,
        "timeout_seconds": 60,
    }


def test_jwt_from_cdp_ws_url() -> None:
    # %2F must be percent-decoded: the jwt is extracted with query parsing,
    # not raw string slicing.
    assert jwt_from_cdp_ws_url("wss://h/browser/cdp?jwt=abc%2Fdef&x=1") == "abc/def"


@respx.mock
def test_for_browser_process_exec_routes_to_session_base() -> None:
    # process.exec must hit the session's base_url (not the Kernel API host)
    # and carry the jwt from cdp_ws_url as a query parameter.
    route = respx.post("http://browser-session.test/browser/kernel/process/exec?jwt=token-abc").mock(
        return_value=httpx.Response(
            200,
            json={
                "exit_code": 0,
                "stdout_b64": "",
                "stderr_b64": "",
            },
        )
    )
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        b = client.for_browser(_fake_browser())
        out = b.process.exec(command="echo", args=["hi"])
        assert route.called
        call = cast(Any, route.calls[0])
        request = cast(httpx.Request, call.request)
        # The JSON body must round-trip the exec arguments unchanged.
        sent = request.read().decode()
        body = json.loads(sent)
        assert body["command"] == "echo"
        assert body["args"] == ["hi"]
        assert out.exit_code == 0


@respx.mock
def test_browser_request_uses_curl_raw() -> None:
    # Raw HTTP via the scoped client is proxied through the session's
    # /curl/raw endpoint, with the jwt appended for auth.
    route = respx.get("http://browser-session.test/browser/kernel/curl/raw").mock(
        return_value=httpx.Response(200, content=b"ok")
    )
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        b = client.for_browser(_fake_browser())
        r = b.request("GET", "https://example.com", params={"timeout_ms": 5000})
        assert r.status_code == 200
        assert r.content == b"ok"
        assert route.called
        call = cast(Any, route.calls[0])
        request = cast(httpx.Request, call.request)
        assert "curl/raw" in str(request.url)
        assert "jwt=token-abc" in str(request.url)


@respx.mock
def test_browser_request_params_cannot_override_target_url_or_jwt() -> None:
    # "url" and "jwt" are reserved /curl/raw query keys: user-supplied params
    # with those names must be dropped, while other params pass through.
    route = respx.get("http://browser-session.test/browser/kernel/curl/raw").mock(
        return_value=httpx.Response(200, content=b"ok")
    )
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        b = client.for_browser(_fake_browser())
        b.request(
            "GET",
            "https://example.com",
            params={"url": "https://evil.example", "jwt": "other", "timeout_ms": 1},
        )
        assert route.called
        call = cast(Any, route.calls[0])
        request = cast(httpx.Request, call.request)
        req_url = request.url
        assert str(req_url.params.get("url")) == "https://example.com"
        assert str(req_url.params.get("jwt")) == "token-abc"
        assert str(req_url.params.get("timeout_ms")) == "1"


@respx.mock
def test_browser_stream_params_cannot_override_target_url_or_jwt() -> None:
    # Same reserved-key protection as above, but for the streaming entry point.
    route = respx.get("http://browser-session.test/browser/kernel/curl/raw").mock(
        return_value=httpx.Response(200, content=b"streamed")
    )
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        b = client.for_browser(_fake_browser())
        with b.stream(
            "GET",
            "https://example.com",
            params={"url": "https://evil.example", "jwt": "other"},
        ) as resp:
            assert resp.status_code == 200
            assert resp.read() == b"streamed"
        assert route.called
        call = cast(Any, route.calls[0])
        request = cast(httpx.Request, call.request)
        req_url = request.url
        assert str(req_url.params.get("url")) == "https://example.com"
        assert str(req_url.params.get("jwt")) == "token-abc"


@respx.mock
def test_browser_stream_reads_body() -> None:
    # Streaming response body is readable inside the context manager.
    respx.get("http://browser-session.test/browser/kernel/curl/raw").mock(
        return_value=httpx.Response(200, content=b"streamed")
    )
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        b = client.for_browser(_fake_browser())
        with b.stream("GET", "https://example.com") as resp:
            assert resp.status_code == 200
            assert resp.read() == b"streamed"


def test_for_browser_requires_base_url() -> None:
    # A browser payload without a base_url cannot be scoped; the error message
    # must name the missing field.
    bad = {**_fake_browser(), "base_url": None}
    with Kernel(base_url=base_url, api_key=api_key, _strict_response_validation=True) as client:
        with pytest.raises(ValueError, match="base_url"):
            client.for_browser(bad)