diff --git a/qortal_api_tool.py b/qortal_api_tool.py
new file mode 100644
index 0000000..b4493be
--- /dev/null
+++ b/qortal_api_tool.py
@@ -0,0 +1,147 @@
+
+"""
+title: Qortal API Pipeline
+author: crowe & ChatGPT
+git_url: https://github.com/crowetic/qortal-api-tool
+description: Native Open WebUI tool that lets the LLM call any Qortal Core HTTP endpoint,
+             with automatic discovery and validation against the node's openapi.json.
+required_open_webui_version: 0.5.0
+version: 0.2.0
+licence: MIT
+"""
+
+import os
+import json
+import requests
+from typing import Any, Dict, Optional, List
+
+# ---------------------------------------------------------------------------
+# Configuration
+# ---------------------------------------------------------------------------
+DEFAULT_QORTAL_URL = os.getenv("QORTAL_API_URL", "https://api.qortal.org").rstrip("/")
+OPENAPI_URL = os.getenv("QORTAL_OPENAPI_URL", f"{DEFAULT_QORTAL_URL}/openapi.json")
+
+# ---------------------------------------------------------------------------
+class Tools:
+    """
+    ⚙️ Qortal API Toolkit for Open WebUI
+    -------------------------------------------------
+    Generic wrappers + dynamic endpoint list pulled from openapi.json.
+    Methods exposed to the LLM:
+      • list_get_endpoints() – list of all available GET endpoints
+      • qortal_get() – perform validated HTTP GET
+      • qortal_post() – perform HTTP POST
+    """
+
+    # ---------------------- internal helpers ------------------------------ #
+    _openapi_cache: Optional[Dict[str, Any]] = None
+
+    def _build_url(self, endpoint: str, path_params: Optional[Dict[str, str]] = None) -> str:
+        """Replace {tokens} in endpoint and join with base URL."""
+        cleaned = f"/{endpoint.lstrip('/')}"  # ensure single leading slash
+        if path_params:
+            for k, v in path_params.items():
+                cleaned = cleaned.replace("{" + k + "}", str(v))
+        return DEFAULT_QORTAL_URL + cleaned
+
+    # ---------------------- OpenAPI helpers ------------------------------- #
+    def _load_openapi(self) -> Dict[str, Any]:
+        if self._openapi_cache is not None:
+            return self._openapi_cache
+        try:
+            resp = requests.get(OPENAPI_URL, timeout=30)
+            resp.raise_for_status()
+            self._openapi_cache = resp.json()
+        except Exception as e:
+            self._openapi_cache = {}
+        return self._openapi_cache
+
+    def list_get_endpoints(self) -> List[str]:
+        """Return all GET endpoints available on this Qortal node."""
+        spec = self._load_openapi()
+        paths = spec.get("paths", {})
+        return [p for p, verbs in paths.items() if "get" in verbs]
+
+    def _is_valid_get(self, endpoint: str) -> bool:
+        endpoint = "/" + endpoint.lstrip("/")
+        return endpoint in self.list_get_endpoints()
+
+    # -------------------------- request core ------------------------------ #
+    def _request(
+        self,
+        method: str,
+        endpoint: str,
+        path_params: Optional[Dict[str, Any]] = None,
+        query_params: Optional[Dict[str, Any]] = None,
+        json_body: Optional[Dict[str, Any]] = None,
+        validate_get: bool = True,
+    ) -> Dict[str, Any]:
+        if method.upper() == "GET" and validate_get and not self._is_valid_get(endpoint):
+            return {
+                "success": False,
+                "error": f"Endpoint '{endpoint}' is not listed as GET in node's OpenAPI spec",
+                "url": None,
+            }
+
+        url = self._build_url(endpoint, path_params)
+        try:
+            resp = requests.request(
+                method=method.upper(),
+                url=url,
+                params=query_params,
+                json=json_body,
+                timeout=30,
+            )
+            try:
+                data = resp.json()
+            except ValueError:
+                data = resp.text
+            return {
+                "status_code": resp.status_code,
+                "success": resp.ok,
+                "url": resp.url,
+                "data": data,
+            }
+        except Exception as e:
+            return {"success": False, "error": str(e), "url": url}
+
+    # -------------------------- PUBLIC TOOLS ----------------------------- #
+    def qortal_get(
+        self,
+        endpoint: str,
+        path_params: Optional[dict] = None,
+        query_params: Optional[dict] = None,
+    ) -> dict:
+        """
+        Generic HTTP GET to Qortal Core.
+
+        Parameters
+        ----------
+        endpoint : str
+            Endpoint path (e.g. "/addresses/balance/{address}")
+        path_params : dict, optional
+            Dict for replacement of {tokens} in endpoint.
+        query_params : dict, optional
+            Dict for URL query parameters.
+        """
+        return self._request("GET", endpoint, path_params, query_params)
+
+    def qortal_post(
+        self,
+        endpoint: str,
+        path_params: Optional[dict] = None,
+        json_body: Optional[dict] = None,
+    ) -> dict:
+        """
+        Generic HTTP POST to Qortal Core.
+
+        Parameters
+        ----------
+        endpoint : str
+            Endpoint path (e.g. "/arbitrary/publish")
+        path_params : dict, optional
+            Dict for replacement of {tokens} in endpoint.
+        json_body : dict, optional
+            Dict to send as JSON body.
+        """
+        return self._request("POST", endpoint, path_params, None, json_body, validate_get=False)
diff --git a/setup-ai-stack.sh b/setup-ai-stack.sh
index 5148677..00ed9c0 100644
--- a/setup-ai-stack.sh
+++ b/setup-ai-stack.sh
@@ -71,6 +71,7 @@ if [[ "$1" == "--update" ]]; then
   docker run -d -p 3000:8080 --gpus all \
     -e OPENAI_API_BASE_URL=http://pipelines:9099 \
     -e OPENAI_API_KEY=0p3n-w3bu! \
+    -e OLLAMA_MAX_LOADED_MODELS=2 \
     -v ollama:/root/.ollama \
     -v open-webui:/app/backend/data \
     --name open-webui \
@@ -97,6 +98,7 @@ if [[ "$1" == "--update" ]]; then
     -e ENABLE_BACKENDS=llama-cuda,ollama \
     -e INCLUDE_DEFAULT_MODELS=true \
     -e AUTOLOAD_MODELS=true \
+    -e MODEL_IDLE_TIMEOUT=900 \
     -e MODEL_PATH=/models \
     --restart unless-stopped \
     localai/localai:latest-aio-gpu-nvidia-cuda-12
@@ -206,7 +208,7 @@ if [[ "$1" != "--update" ]]; then
   docker run -d -p 3000:8080 --gpus all \
     -e OPENAI_API_BASE_URL=http://pipelines:9099 \
    -e OPENAI_API_KEY=0p3n-w3bu! \
-    -e OLLAMA_MAX_LOADED_MODELS=1 \
+    -e OLLAMA_MAX_LOADED_MODELS=2 \
     -v ollama:/root/.ollama \
     -v open-webui:/app/backend/data \
     --name open-webui \
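
For reference, a minimal usage sketch of the new Tools class follows. It is illustrative only and not part of the patch: it assumes qortal_api_tool.py is importable from the working directory, that the configured node serves openapi.json, and the address value is a placeholder.

# Minimal usage sketch (illustrative only; the address is a placeholder).
from qortal_api_tool import Tools

tools = Tools()

# Discover which GET endpoints the node's openapi.json advertises.
print(tools.list_get_endpoints()[:10])

# Validated GET: "{address}" is substituted from path_params before the request.
result = tools.qortal_get(
    "/addresses/balance/{address}",
    path_params={"address": "Q_EXAMPLE_ADDRESS"},  # hypothetical placeholder address
)
print(result.get("status_code"), result.get("data"))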