diff --git a/.agent/configs/skill-sources.json b/.agent/configs/skill-sources.json
index 4ab4f0103..bc64ddd77 100644
--- a/.agent/configs/skill-sources.json
+++ b/.agent/configs/skill-sources.json
@@ -60,6 +60,28 @@
       "last_checked": "2026-01-24T12:00:00Z",
       "merge_strategy": "added",
       "notes": "Includes 18 rule files in .agent/tools/video/heygen-skill/rules/"
+    },
+    {
+      "name": "caldav-calendar",
+      "upstream_url": "https://clawdhub.com/Asleep123/caldav-calendar",
+      "upstream_commit": "1.0.1",
+      "local_path": ".agent/tools/productivity/caldav-calendar-skill.md",
+      "format_detected": "clawdhub",
+      "imported_at": "2026-01-24T22:20:00Z",
+      "last_checked": "2026-01-24T22:20:00Z",
+      "merge_strategy": "added",
+      "notes": "ClawdHub v1.0.1 by @Asleep123 - CalDAV calendar sync via vdirsyncer + khal"
+    },
+    {
+      "name": "proxmox-full",
+      "upstream_url": "https://clawdhub.com/mSarheed/proxmox-full",
+      "upstream_commit": "1.0.0",
+      "local_path": ".agent/services/hosting/proxmox-full-skill.md",
+      "format_detected": "clawdhub",
+      "imported_at": "2026-01-24T22:20:00Z",
+      "last_checked": "2026-01-24T22:20:00Z",
+      "merge_strategy": "added",
+      "notes": "ClawdHub v1.0.0 by @mSarheed - Complete Proxmox VE management via REST API"
     }
   ]
 }
diff --git a/.agent/scripts/add-skill-helper.sh b/.agent/scripts/add-skill-helper.sh
index c2f9b02c8..9427e6a0e 100755
--- a/.agent/scripts/add-skill-helper.sh
+++ b/.agent/scripts/add-skill-helper.sh
@@ -2,11 +2,11 @@
 # =============================================================================
 # Add External Skill Helper
 # =============================================================================
-# Import external skills from GitHub repos, convert to aidevops format,
-# handle conflicts, and track upstream sources for update detection.
+# Import external skills from GitHub repos or ClawdHub, convert to aidevops
+# format, handle conflicts, and track upstream sources for update detection.
 #
 # Usage:
-#   add-skill-helper.sh add <url> [--name <name>] [--force]
+#   add-skill-helper.sh add <source> [--name <name>] [--force]
 #   add-skill-helper.sh list
 #   add-skill-helper.sh check-updates
 #   add-skill-helper.sh remove <name>
@@ -16,6 +16,8 @@
 #   add-skill-helper.sh add dmmulroy/cloudflare-skill
 #   add-skill-helper.sh add https://github.com/anthropics/skills/pdf
 #   add-skill-helper.sh add vercel-labs/agent-skills --name vercel
+#   add-skill-helper.sh add clawdhub:caldav-calendar
+#   add-skill-helper.sh add https://clawdhub.com/Asleep123/caldav-calendar
 #   add-skill-helper.sh check-updates
 # =============================================================================
@@ -59,17 +61,17 @@ log_error() {
 
 show_help() {
     cat << 'EOF'
-Add External Skill Helper - Import skills from GitHub to aidevops
+Add External Skill Helper - Import skills from GitHub or ClawdHub to aidevops
 
 USAGE:
   add-skill-helper.sh <command> [options]
 
 COMMANDS:
-  add <url>         Import a skill from GitHub
-  list              List all imported skills
-  check-updates     Check for upstream updates
-  remove <name>     Remove an imported skill
-  help              Show this help message
+  add <source>      Import a skill
+  list              List all imported skills
+  check-updates     Check for upstream updates
+  remove <name>     Remove an imported skill
+  help              Show this help message
 
 OPTIONS:
   --name <name>     Override the skill name
@@ -86,11 +88,21 @@ EXAMPLES:
   # Import with custom name
   add-skill-helper.sh add vercel-labs/agent-skills --name vercel-deploy
 
+  # Import from ClawdHub (shorthand)
+  add-skill-helper.sh add clawdhub:caldav-calendar
+
+  # Import from ClawdHub (full URL)
+  add-skill-helper.sh add https://clawdhub.com/Asleep123/caldav-calendar
+
   # Check all imported skills for updates
   add-skill-helper.sh check-updates
 
+SUPPORTED SOURCES:
+  - GitHub repos (owner/repo or full URL)
+  - ClawdHub registry (clawdhub:slug or clawdhub.com URL)
+
 SUPPORTED FORMATS:
-  - SKILL.md (OpenSkills/Claude Code format)
+  - SKILL.md (OpenSkills/Claude Code/ClawdHub format)
   - AGENTS.md (aidevops/Windsurf format)
   - .cursorrules (Cursor format)
   - Raw markdown files
@@ -279,6 +291,10 @@ determine_target_path() {
         category="tools/credentials"
     elif echo "$content" | grep -qi "vercel\|coolify\|docker\|kubernetes"; then
         category="tools/deployment"
+    elif echo "$content" | grep -qi "proxmox\|hypervisor\|virtualization\|vm.management"; then
+        category="services/hosting"
+    elif echo "$content" | grep -qi "calendar\|caldav\|ical\|scheduling"; then
+        category="tools/productivity"
     elif echo "$content" | grep -qi "dns\|hosting\|domain"; then
         category="services/hosting"
     fi
@@ -483,7 +499,31 @@ cmd_add() {
         esac
     done
 
-    log_info "Parsing URL: $url"
+    log_info "Parsing source: $url"
+
+    # Detect ClawdHub source (clawdhub:slug or clawdhub.com URL)
+    local is_clawdhub=false
+    local clawdhub_slug=""
+
+    if [[ "$url" == clawdhub:* ]]; then
+        is_clawdhub=true
+        clawdhub_slug="${url#clawdhub:}"
+    elif [[ "$url" == *clawdhub.com* ]]; then
+        is_clawdhub=true
+        # Strip URL prefix and extract slug (last path segment)
+        clawdhub_slug="${url#*clawdhub.com/}"
+        clawdhub_slug="${clawdhub_slug#/}"
+        clawdhub_slug="${clawdhub_slug%/}"
+        # If format is owner/slug, take just the slug
+        if [[ "$clawdhub_slug" == */* ]]; then
+            clawdhub_slug="${clawdhub_slug##*/}"
+        fi
+    fi
+
+    if [[ "$is_clawdhub" == true ]]; then
+        cmd_add_clawdhub "$clawdhub_slug" "$custom_name" "$force" "$dry_run"
+        return $?
+    fi
 
     # Parse GitHub URL
     local parsed
@@ -491,8 +531,8 @@
     IFS='|' read -r owner repo subpath <<< "$parsed"
 
     if [[ -z "$owner" || -z "$repo" ]]; then
-        log_error "Could not parse GitHub URL: $url"
-        log_info "Expected format: owner/repo or https://github.com/owner/repo"
+        log_error "Could not parse source URL: $url"
+        log_info "Expected: owner/repo, https://github.com/owner/repo, or clawdhub:slug"
         return 1
     fi
@@ -724,6 +764,161 @@ cmd_add() {
     return 0
 }
 
+# Import a skill from ClawdHub registry
+cmd_add_clawdhub() {
+    local slug="$1"
+    local custom_name="$2"
+    local force="$3"
+    local dry_run="$4"
+
+    if [[ -z "$slug" ]]; then
+        log_error "ClawdHub slug required"
+        return 1
+    fi
+
+    log_info "Importing from ClawdHub: $slug"
+
+    # Get skill metadata from API
+    local api_response
+    api_response=$(curl -s --connect-timeout 10 --max-time 30 "${CLAWDHUB_API:-https://clawdhub.com/api/v1}/skills/${slug}" 2>/dev/null)
+
+    if [[ -z "$api_response" ]] || ! echo "$api_response" | python3 -c "import sys,json; json.load(sys.stdin)" 2>/dev/null; then
+        log_error "Could not fetch skill info from ClawdHub API: $slug"
+        return 1
+    fi
+
+    # Extract metadata
+    local display_name summary owner_handle version
+    display_name=$(echo "$api_response" | python3 -c "import sys,json; print(json.load(sys.stdin).get('skill',{}).get('displayName',''))" 2>/dev/null)
+    summary=$(echo "$api_response" | python3 -c "import sys,json; print(json.load(sys.stdin).get('skill',{}).get('summary',''))" 2>/dev/null)
+    owner_handle=$(echo "$api_response" | python3 -c "import sys,json; print(json.load(sys.stdin).get('owner',{}).get('handle',''))" 2>/dev/null)
+    version=$(echo "$api_response" | python3 -c "import sys,json; print(json.load(sys.stdin).get('latestVersion',{}).get('version',''))" 2>/dev/null)
+
+    log_info "Found: $display_name v${version} by @${owner_handle}"
+
+    # Determine skill name
+    local skill_name
+    if [[ -n "$custom_name" ]]; then
+        skill_name=$(to_kebab_case "$custom_name")
+    else
+        skill_name=$(to_kebab_case "$slug")
+    fi
+
+    # Determine target path
+    local target_path
+    target_path=$(determine_target_path "$skill_name" "$summary" ".")
+    log_info "Target path: .agent/$target_path"
+
+    # Check for conflicts
+    local conflicts
+    conflicts=$(check_conflicts "$target_path" ".agent") || true
+    if [[ -n "$conflicts" ]]; then
+        local blocking_conflicts
+        blocking_conflicts=$(echo "$conflicts" | grep -v "^INFO:" || true)
+
+        if [[ -n "$blocking_conflicts" && "$force" != true ]]; then
+            log_warning "Conflicts detected:"
+            echo "$blocking_conflicts" | while read -r conflict; do
+                echo "  - ${conflict#*: }"
+            done
+            echo ""
+            echo "Options:"
+            echo "  1. Replace (overwrite existing)"
+            echo "  2. Separate (use different name)"
+            echo "  3. Skip (cancel import)"
+            echo ""
+            read -rp "Choose option [1-3]: " choice
+
+            case "$choice" in
+                1) log_info "Replacing existing..." ;;
+                2)
+                    read -rp "Enter new name: " new_name
+                    skill_name=$(to_kebab_case "$new_name")
+                    target_path=$(determine_target_path "$skill_name" "$summary" ".")
+                    ;;
+                3|*) log_info "Import cancelled"; return 0 ;;
+            esac
+        fi
+    fi
+
+    if [[ "$dry_run" == true ]]; then
+        log_info "DRY RUN - Would create:"
+        echo "  .agent/${target_path}.md"
+        return 0
+    fi
+
+    # Fetch SKILL.md content using clawdhub-helper.sh (Playwright-based)
+    local helper_script
+    helper_script="$(dirname "$0")/clawdhub-helper.sh"
+    local fetch_dir="${TMPDIR:-/tmp}/clawdhub-fetch/${slug}"
+
+    rm -rf "$fetch_dir"
+
+    if [[ -x "$helper_script" ]]; then
+        if ! "$helper_script" fetch "$slug" --output "$fetch_dir"; then
+            log_error "Failed to fetch SKILL.md from ClawdHub"
+            return 1
+        fi
+    else
+        log_error "clawdhub-helper.sh not found at: $helper_script"
+        return 1
+    fi
+
+    # Verify SKILL.md was fetched
+    if [[ ! -f "$fetch_dir/SKILL.md" || ! -s "$fetch_dir/SKILL.md" ]]; then
+        log_error "SKILL.md not found or empty after fetch"
+        return 1
+    fi
+
+    # Create target directory
+    local target_dir
+    target_dir=".agent/$(dirname "$target_path")"
+    mkdir -p "$target_dir"
+
+    # Convert to aidevops format
+    local target_file=".agent/${target_path}.md"
+
+    # Write aidevops-style header
+    local safe_summary
+    safe_summary=$(printf '%s' "${summary:-Imported from ClawdHub}" | sed 's/\\/\\\\/g; s/"/\\"/g')
+
+    cat > "$target_file" << EOF
+---
+description: "${safe_summary}"
+mode: subagent
+imported_from: clawdhub
+clawdhub_slug: "${slug}"
+clawdhub_version: "${version}"
+---
+# ${display_name:-$skill_name}
+
+EOF
+
+    # Append the fetched SKILL.md content (skip any existing frontmatter)
+    awk '
+        BEGIN { in_frontmatter = 0; after_frontmatter = 0; has_frontmatter = 0 }
+        NR == 1 && /^---$/ { in_frontmatter = 1; has_frontmatter = 1; next }
+        in_frontmatter && /^---$/ { in_frontmatter = 0; after_frontmatter = 1; next }
+        in_frontmatter { next }
+        !has_frontmatter || after_frontmatter { print }
+    ' "$fetch_dir/SKILL.md" >> "$target_file"
+
+    log_success "Created: $target_file"
+
+    # Register in skill-sources.json
+    local upstream_url="https://clawdhub.com/${owner_handle}/${slug}"
+    register_skill "$skill_name" "$upstream_url" ".agent/${target_path}.md" "clawdhub" "$version" "added" "ClawdHub v${version} by @${owner_handle}"
+
+    # Cleanup
+    rm -rf "$fetch_dir"
+
+    log_success "Skill '$skill_name' imported from ClawdHub successfully"
+    echo ""
+    log_info "Run './setup.sh' to create symlinks for other AI assistants"
+
+    return 0
+}
+
 cmd_list() {
     ensure_skill_sources
diff --git a/.agent/scripts/clawdhub-helper.sh b/.agent/scripts/clawdhub-helper.sh
new file mode 100755
index 000000000..5353ee6ef
--- /dev/null
+++ b/.agent/scripts/clawdhub-helper.sh
@@ -0,0 +1,489 @@
+#!/bin/bash
+# =============================================================================
+# ClawdHub Helper - Fetch skills from clawdhub.com using browser automation
+# =============================================================================
+# Uses Playwright to extract SKILL.md content from ClawdHub's SPA since the
+# API doesn't expose raw file content. Falls back to clawdhub CLI if available.
+#
+# Usage:
+#   clawdhub-helper.sh fetch <slug|url> [--output <dir>]
+#   clawdhub-helper.sh search <query>
+#   clawdhub-helper.sh info <slug|url>
+#   clawdhub-helper.sh help
+#
+# Examples:
+#   clawdhub-helper.sh fetch caldav-calendar
+#   clawdhub-helper.sh fetch proxmox-full --output /tmp/skill
+#   clawdhub-helper.sh search "calendar"
+#   clawdhub-helper.sh info caldav-calendar
+# =============================================================================
+
+set -euo pipefail
+
+# Configuration
+CLAWDHUB_BASE_URL="https://clawdhub.com"
+CLAWDHUB_API="${CLAWDHUB_BASE_URL}/api/v1"
+TEMP_DIR="${TMPDIR:-/tmp}/clawdhub-fetch"
+
+# Colors
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m'
+
+log_info() {
+    echo -e "${BLUE}[clawdhub]${NC} $1"
+    return 0
+}
+
+log_success() {
+    echo -e "${GREEN}[OK]${NC} $1"
+    return 0
+}
+
+log_warning() {
+    echo -e "${YELLOW}[WARN]${NC} $1"
+    return 0
+}
+
+log_error() {
+    echo -e "${RED}[ERROR]${NC} $1"
+    return 0
+}
+
+show_help() {
+    cat << 'EOF'
+ClawdHub Helper - Fetch skills from clawdhub.com using browser automation
+
+USAGE:
+  clawdhub-helper.sh <command> [options]
+
+COMMANDS:
+  fetch <slug|url>    Download skill SKILL.md via Playwright
+  search <query>      Search skills via ClawdHub vector search API
+  info <slug|url>     Show skill metadata from API
+  help                Show this help message
+
+OPTIONS:
+  --output <dir>      Output directory (default: /tmp/clawdhub-fetch/<slug>)
+
+EXAMPLES:
+  # Fetch a skill's SKILL.md content
+  clawdhub-helper.sh fetch caldav-calendar
+
+  # Fetch to specific directory
+  clawdhub-helper.sh fetch proxmox-full --output ./skills/proxmox
+
+  # Search for skills
+  clawdhub-helper.sh search "kubernetes"
+
+  # Get skill metadata
+  clawdhub-helper.sh info caldav-calendar
+
+SUPPORTED URL FORMATS:
+  clawdhub-helper.sh fetch caldav-calendar
+  clawdhub-helper.sh fetch owner/slug
+  clawdhub-helper.sh fetch https://clawdhub.com/owner/slug
+
+NOTES:
+  - Requires Playwright (npx playwright) for fetch command
+  - Falls back to clawdhub CLI if installed (npx clawdhub install)
+  - Search uses ClawdHub's vector/semantic search API
+  - API endpoints used: /api/v1/skills/{slug}, /api/search?q={query}
+EOF
+    return 0
+}
+
+# Parse ClawdHub URL or shorthand into slug
+# Accepts: "slug", "owner/slug", "https://clawdhub.com/owner/slug"
+parse_clawdhub_input() {
+    local input="$1"
+
+    # Strip URL prefix
+    input="${input#https://clawdhub.com/}"
+    input="${input#http://clawdhub.com/}"
+    input="${input#clawdhub.com/}"
+
+    # Strip leading/trailing slashes
+    input="${input#/}"
+    input="${input%/}"
+
+    # If format is "owner/slug", extract just the slug (last segment)
+    if [[ "$input" == */* ]]; then
+        echo "${input##*/}"
+    else
+        echo "$input"
+    fi
+    return 0
+}
+
+# Fetch skill metadata from API
+fetch_skill_info() {
+    local slug="$1"
+
+    local response
+    response=$(curl -s --connect-timeout 10 --max-time 30 "${CLAWDHUB_API}/skills/${slug}")
+
+    if echo "$response" | python3 -c "import sys,json; json.load(sys.stdin)" 2>/dev/null; then
+        echo "$response"
+    else
+        log_error "Failed to fetch skill info for: $slug"
+        return 1
+    fi
+    return 0
+}
+
+# Extract SKILL.md content using Playwright
+# The ClawdHub SPA renders SKILL.md as HTML on the skill detail page.
+# We use Playwright to navigate, wait for render, and extract the markdown content.
+fetch_skill_content_playwright() {
+    local slug="$1"
+    local output_dir="$2"
+
+    mkdir -p "$output_dir"
+
+    # First get the owner handle from API to construct the full URL
+    local info
+    info=$(fetch_skill_info "$slug") || return 1
+
+    local owner
+    owner=$(echo "$info" | python3 -c "import sys,json; print(json.load(sys.stdin).get('owner',{}).get('handle',''))" 2>/dev/null)
+
+    if [[ -z "$owner" ]]; then
+        log_error "Could not determine owner for skill: $slug"
+        return 1
+    fi
+
+    local skill_url="${CLAWDHUB_BASE_URL}/${owner}/${slug}"
+    log_info "Fetching SKILL.md from: $skill_url"
+
+    # Create a temporary Node.js project with Playwright to extract SKILL.md
+    local pw_dir
+    pw_dir=$(mktemp -d "${TMPDIR:-/tmp}/clawdhub-pw-XXXXXX")
+
+    # Create package.json for the temporary project
+    cat > "$pw_dir/package.json" << 'PKGJSON'
+{"name":"clawdhub-fetch","private":true,"type":"module","dependencies":{"playwright":"^1.50.0"}}
+PKGJSON
+
+    # Create the extraction script
+    cat > "$pw_dir/fetch.mjs" << 'PLAYWRIGHT_SCRIPT'
+import { chromium } from 'playwright';
+import { writeFileSync } from 'fs';
+
+const url = process.argv[2];
+const outputFile = process.argv[3];
+
+if (!url || !outputFile) {
+  console.error('Usage: node fetch.mjs <url> <output-file>');
+  process.exit(1);
+}
+
+const browser = await chromium.launch({ headless: true });
+const page = await browser.newPage();
+
+try {
+  await page.goto(url, { waitUntil: 'networkidle', timeout: 30000 });
+
+  // Wait for the skill content to render
+  await page.waitForSelector('[class*="prose"], [class*="markdown"], article, .skill-content', {
+    timeout: 10000
+  }).catch(() => {});
+
+  // Extract rendered HTML and convert back to markdown
+  let content = await page.evaluate(() => {
+    const proseEl = document.querySelector('[class*="prose"]')
+      || document.querySelector('[class*="markdown"]')
+      || document.querySelector('article')
+      || document.querySelector('.skill-content');
+
+    if (!proseEl) return null;
+
+    const lines = [];
+    const walk = (node) => {
+      if (node.nodeType === Node.TEXT_NODE) {
+        const text = node.textContent;
+        if (text.trim()) lines.push(text);
+        return;
+      }
+      if (node.nodeType !== Node.ELEMENT_NODE) return;
+
+      const tag = node.tagName.toLowerCase();
+
+      if (tag === 'h1') { lines.push('\n# ' + node.textContent.trim()); return; }
+      if (tag === 'h2') { lines.push('\n## ' + node.textContent.trim()); return; }
+      if (tag === 'h3') { lines.push('\n### ' + node.textContent.trim()); return; }
+      if (tag === 'h4') { lines.push('\n#### ' + node.textContent.trim()); return; }
+
+      if (tag === 'pre') {
+        const code = node.querySelector('code');
+        const lang = code?.className?.match(/language-(\w+)/)?.[1] || '';
+        lines.push('\n```' + lang);
+        lines.push((code || node).textContent.trimEnd());
+        lines.push('```\n');
+        return;
+      }
+
+      if (tag === 'code' && node.parentElement?.tagName !== 'PRE') {
+        lines.push('`' + node.textContent + '`');
+        return;
+      }
+
+      if (tag === 'p') {
+        const children = [];
+        for (const child of node.childNodes) {
+          if (child.nodeType === Node.TEXT_NODE) children.push(child.textContent);
+          else if (child.tagName === 'CODE') children.push('`' + child.textContent + '`');
+          else if (child.tagName === 'STRONG' || child.tagName === 'B') children.push('**' + child.textContent + '**');
+          else if (child.tagName === 'EM' || child.tagName === 'I') children.push('*' + child.textContent + '*');
+          else if (child.tagName === 'A') children.push('[' + child.textContent + '](' + child.href + ')');
+          else children.push(child.textContent);
+        }
+        lines.push('\n' + children.join(''));
+        return;
+      }
+
+      if (tag === 'ul' || tag === 'ol') {
+        let idx = 0;
+        for (const li of node.children) {
+          idx++;
+          const prefix = tag === 'ol' ? `${idx}. ` : '- ';
+          lines.push(prefix + li.textContent.trim());
+        }
+        lines.push('');
+        return;
+      }
+
+      if (tag === 'hr') { lines.push('\n---\n'); return; }
+      if (tag === 'br') { lines.push(''); return; }
+      if (tag === 'blockquote') { lines.push('> ' + node.textContent.trim()); return; }
+
+      for (const child of node.childNodes) { walk(child); }
+    };
+
+    walk(proseEl);
+    return lines.join('\n');
+  });
+
+  if (!content || content.trim().length < 50) {
+    content = await page.evaluate(() => {
+      const main = document.querySelector('main') || document.body;
+      return main.innerText;
+    });
+  }
+
+  writeFileSync(outputFile, content.trim() + '\n');
+  console.log('OK');
+} catch (err) {
+  console.error('Error:', err.message);
+  process.exit(1);
+} finally {
+  await browser.close();
+}
+PLAYWRIGHT_SCRIPT
+
+    local output_file="${output_dir}/SKILL.md"
+
+    # Install playwright and run the fetch script
+    log_info "Installing Playwright (temporary)..."
+    if (cd "$pw_dir" && npm install --silent 2>/dev/null && npx playwright install chromium --with-deps 2>/dev/null); then
+        log_info "Running browser extraction..."
+        if (cd "$pw_dir" && node fetch.mjs "$skill_url" "$output_file" 2>/dev/null); then
+            rm -rf "$pw_dir"
+            if [[ -f "$output_file" && -s "$output_file" ]]; then
+                log_success "Extracted SKILL.md ($(wc -c < "$output_file" | tr -d ' ') bytes)"
+                return 0
+            fi
+        fi
+    fi
+
+    rm -rf "$pw_dir"
+    log_warning "Playwright extraction failed, trying clawdhub CLI fallback..."
+
+    # Fallback: try clawdhub CLI
+    if command -v npx &>/dev/null; then
+        log_info "Trying: npx clawdhub install $slug"
+        if (cd "$output_dir" && npx --yes clawdhub@latest install "$slug" --force 2>/dev/null); then
+            # clawdhub installs to ./skills/<slug>/SKILL.md
+            local installed_skill
+            installed_skill=$(find "$output_dir" -name "SKILL.md" -type f 2>/dev/null | head -1)
+            if [[ -n "$installed_skill" && -f "$installed_skill" ]]; then
+                if [[ "$installed_skill" != "$output_file" ]]; then
+                    cp "$installed_skill" "$output_file"
+                fi
+                log_success "Fetched via clawdhub CLI"
+                return 0
+            fi
+        fi
+    fi
+
+    log_error "Could not fetch SKILL.md for: $slug"
+    return 1
+}
+
+# =============================================================================
+# Commands
+# =============================================================================
+
+cmd_fetch() {
+    local input="$1"
+    shift || true
+
+    local output_dir=""
+
+    # Parse options
+    while [[ $# -gt 0 ]]; do
+        case "$1" in
+            --output)
+                output_dir="$2"
+                shift 2
+                ;;
+            *)
+                log_error "Unknown option: $1"
+                return 1
+                ;;
+        esac
+    done
+
+    local slug
+    slug=$(parse_clawdhub_input "$input")
+
+    if [[ -z "$slug" ]]; then
+        log_error "Could not parse slug from: $input"
+        return 1
+    fi
+
+    log_info "Skill slug: $slug"
+
+    # Set default output directory
+    if [[ -z "$output_dir" ]]; then
+        output_dir="${TEMP_DIR}/${slug}"
+    fi
+
+    # Fetch the skill content
+    fetch_skill_content_playwright "$slug" "$output_dir"
+    return $?
+}
+
+cmd_search() {
+    local query="$1"
+
+    if [[ -z "$query" ]]; then
+        log_error "Search query required"
+        return 1
+    fi
+
+    log_info "Searching ClawdHub for: $query"
+
+    local encoded_query
+    encoded_query=$(python3 -c "import urllib.parse; print(urllib.parse.quote('$query'))" 2>/dev/null || echo "$query")
+
+    local response
+    response=$(curl -s --connect-timeout 10 --max-time 30 "${CLAWDHUB_API}/search?q=${encoded_query}")
+
+    if ! echo "$response" | python3 -c "import sys,json; json.load(sys.stdin)" 2>/dev/null; then
+        log_error "Search failed"
+        return 1
+    fi
+
+    echo "$response" | python3 -c "
+import sys, json
+data = json.load(sys.stdin)
+results = data.get('results', [])
+if not results:
+    print('  No results found')
+else:
+    for r in results:
+        score = r.get('score', 0)
+        slug = r.get('slug', '?')
+        name = r.get('displayName', slug)
+        summary = r.get('summary', '')[:60]
+        print(f'  {name} ({slug}) - score: {score:.2f}')
+        if summary:
+            print(f'    {summary}')
+        print()
+"
+    return 0
+}
+
+cmd_info() {
+    local input="$1"
+
+    local slug
+    slug=$(parse_clawdhub_input "$input")
+
+    if [[ -z "$slug" ]]; then
+        log_error "Could not parse slug from: $input"
+        return 1
+    fi
+
+    log_info "Fetching info for: $slug"
+
+    local response
+    response=$(fetch_skill_info "$slug") || return 1
+
+    echo "$response" | python3 -c "
+import sys, json
+data = json.load(sys.stdin)
+skill = data.get('skill', {})
+owner = data.get('owner', {})
+version = data.get('latestVersion', {})
+stats = skill.get('stats', {})
+
+print(f'  Name:      {skill.get(\"displayName\", \"?\")}')
+print(f'  Slug:      {skill.get(\"slug\", \"?\")}')
+print(f'  Owner:     @{owner.get(\"handle\", \"?\")}')
+print(f'  Version:   {version.get(\"version\", \"?\")}')
+print(f'  Summary:   {skill.get(\"summary\", \"\")}')
+print(f'  Stars:     {stats.get(\"stars\", 0)}')
+print(f'  Downloads: {stats.get(\"downloads\", 0)}')
+print(f'  Installs:  {stats.get(\"installsCurrent\", 0)}')
+print()
+"
+    return 0
+}
+
+# =============================================================================
+# Main
+# =============================================================================
+
+main() {
+    local command="${1:-help}"
+    shift || true
+
+    case "$command" in
+        fetch)
+            if [[ $# -lt 1 ]]; then
+                log_error "Slug or URL required"
+                echo "Usage: clawdhub-helper.sh fetch <slug|url> [--output <dir>]"
+                return 1
+            fi
+            cmd_fetch "$@"
+            ;;
+        search)
+            if [[ $# -lt 1 ]]; then
+                log_error "Search query required"
+                return 1
+            fi
+            cmd_search "$@"
+            ;;
+        info)
+            if [[ $# -lt 1 ]]; then
+                log_error "Slug or URL required"
+                return 1
+            fi
+            cmd_info "$@"
+            ;;
+        help|--help|-h)
+            show_help
+            ;;
+        *)
+            log_error "Unknown command: $command"
+            show_help
+            return 1
+            ;;
+    esac
+}
+
+main "$@"
diff --git a/.agent/scripts/commands/add-skill.md b/.agent/scripts/commands/add-skill.md
index 967b2e608..6feee9ce5 100644
--- a/.agent/scripts/commands/add-skill.md
+++ b/.agent/scripts/commands/add-skill.md
@@ -1,10 +1,10 @@
 ---
-description: Import external skills from GitHub repositories into aidevops
+description: Import external skills from GitHub or ClawdHub into aidevops
 agent: Build+
 mode: subagent
 ---
 
-Import an external skill from a GitHub repository, convert it to aidevops format, and register it for update tracking.
+Import an external skill from GitHub or ClawdHub, convert it to aidevops format, and register it for update tracking.
 
 URL/Repo: $ARGUMENTS
 
@@ -19,6 +19,13 @@ URL/Repo: $ARGUMENTS
 /add-skill anthropics/skills/pdf
 # → .agent/tools/pdf-skill.md
 
+# Import from ClawdHub (shorthand)
+/add-skill clawdhub:caldav-calendar
+# → .agent/tools/productivity/caldav-calendar-skill.md
+
+# Import from ClawdHub (full URL)
+/add-skill https://clawdhub.com/Asleep123/caldav-calendar
+
 # Import with custom name
 /add-skill vercel-labs/agent-skills --name vercel-deploy
 # → .agent/tools/deployment/vercel-deploy-skill.md
@@ -51,7 +58,9 @@ This means:
 Determine if the input is:
 
 - A GitHub shorthand: `owner/repo` or `owner/repo/subpath`
-- A full URL: `https://github.com/owner/repo`
+- A full GitHub URL: `https://github.com/owner/repo`
+- A ClawdHub shorthand: `clawdhub:<slug>`
+- A ClawdHub URL: `https://clawdhub.com/owner/slug`
 - A command: `list`, `check-updates`, `remove <name>`
 
 ### Step 2: Run Helper Script
@@ -90,11 +99,16 @@ After successful import:
 2. Registered in `.agent/configs/skill-sources.json` for update tracking
 3. Run `./setup.sh` to create symlinks for other AI assistants
 
-## Supported Formats
+## Supported Sources & Formats
+
+| Source | Detection | Fetch Method |
+|--------|-----------|--------------|
+| GitHub | `owner/repo` or github.com URL | `git clone --depth 1` |
+| ClawdHub | `clawdhub:slug` or clawdhub.com URL | Playwright browser extraction |
 
 | Format | Detection | Conversion |
 |--------|-----------|------------|
-| SKILL.md | OpenSkills/Claude Code | Frontmatter preserved, content adapted |
+| SKILL.md | OpenSkills/Claude Code/ClawdHub | Frontmatter preserved, content adapted |
 | AGENTS.md | aidevops/Windsurf | Direct copy with mode: subagent |
 | .cursorrules | Cursor | Wrapped in markdown with frontmatter |
 | README.md | Generic | Copied as-is |
@@ -111,6 +125,11 @@ After successful import:
 # Import Vercel deployment skill
 /add-skill vercel-labs/agent-skills
 
+# Import from ClawdHub
+/add-skill clawdhub:caldav-calendar
+/add-skill clawdhub:proxmox-full
+/add-skill https://clawdhub.com/mSarheed/proxmox-full
+
 # Import with force (overwrite existing)
 /add-skill dmmulroy/cloudflare-skill --force
 
@@ -144,5 +163,7 @@ Run `/add-skill check-updates` periodically to see if upstream skills have chang
 ## Related
 
 - `tools/build-agent/add-skill.md` - Detailed conversion logic and merge strategies
+- `scripts/add-skill-helper.sh` - Main import implementation
+- `scripts/clawdhub-helper.sh` - ClawdHub browser-based fetcher (Playwright)
 - `scripts/skill-update-helper.sh` - Automated update checking
 - `scripts/generate-skills.sh` - SKILL.md stub generation for aidevops agents
diff --git a/.agent/services/hosting/proxmox-full-skill.md b/.agent/services/hosting/proxmox-full-skill.md
new file mode 100644
index 000000000..5f3a8d90e
--- /dev/null
+++ b/.agent/services/hosting/proxmox-full-skill.md
@@ -0,0 +1,224 @@
+---
+description: "Complete Proxmox VE hypervisor management via REST API - VMs, containers, snapshots, backups, storage"
+mode: subagent
+imported_from: clawdhub
+clawdhub_slug: "proxmox-full"
+clawdhub_version: "1.0.0"
+---
+# Proxmox VE - Full Management
+
+Complete control over Proxmox VE hypervisor via REST API.
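+
+If no API token exists yet, one can usually be created on the PVE host first; the user `root@pam`, the token name `automation`, and `--privsep 0` below are example values, not fixed names:
+
+```bash
+# Create an API token; --privsep 0 gives the token the user's own permissions.
+# The generated secret is printed once - store it for PVE_TOKEN below.
+pveum user token add root@pam automation --privsep 0
+```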
+
+## Setup
+
+```bash
+export PVE_URL="https://192.168.1.10:8006"
+export PVE_TOKEN="user@pam!tokenid=secret-uuid"
+AUTH="Authorization: PVEAPIToken=$PVE_TOKEN"
+```
+
+## Cluster & Nodes
+
+```bash
+# Cluster status
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/cluster/status" | jq
+
+# List nodes
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes" | jq '.data[] | {node, status, cpu, mem: (.mem/.maxmem*100|round)}'
+
+# Node details
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/status" | jq
+```
+
+## List VMs & Containers
+
+```bash
+# VMs on a node
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu" | jq '.data[] | {vmid, name, status}'
+
+# Containers on a node
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/lxc" | jq '.data[] | {vmid, name, status}'
+
+# Cluster-wide resources
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/cluster/resources?type=vm" | jq '.data[] | {vmid, name, node, status, type}'
+```
+
+## VM/Container Control
+
+```bash
+# Start
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/status/start"
+
+# Stop (immediate)
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/status/stop"
+
+# Shutdown (graceful ACPI)
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/status/shutdown"
+
+# Reboot
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/status/reboot"
+```
+
+Replace `qemu` with `lxc` for containers.
+
+## Create LXC Container
+
+```bash
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/lxc" \
+  -d "vmid=200" \
+  -d "hostname=mycontainer" \
+  -d "ostemplate=local:vztmpl/debian-12-standard_12.2-1_amd64.tar.zst" \
+  -d "storage=local-lvm" \
+  -d "rootfs=local-lvm:8" \
+  -d "memory=2048" \
+  -d "swap=512" \
+  -d "cores=2" \
+  -d "net0=name=eth0,bridge=vmbr0,ip=dhcp" \
+  -d "password=changeme" \
+  -d "start=1" \
+  -d "unprivileged=1"
+```
+
+## Create VM
+
+```bash
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu" \
+  -d "vmid=100" \
+  -d "name=myvm" \
+  -d "memory=4096" \
+  -d "cores=4" \
+  -d "sockets=1" \
+  -d "cpu=host" \
+  -d "net0=virtio,bridge=vmbr0" \
+  -d "scsi0=local-lvm:32" \
+  -d "scsihw=virtio-scsi-single" \
+  -d "ide2=local:iso/debian-12.iso,media=cdrom" \
+  -d "boot=order=scsi0;ide2" \
+  -d "ostype=l26"
+```
+
+## Clone VM/Container
+
+```bash
+# Full clone
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/clone" \
+  -d "newid=101" \
+  -d "name=clone-vm" \
+  -d "full=1"
+
+# Linked clone (faster, shares base)
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/clone" \
+  -d "newid=102" \
+  -d "name=linked-clone" \
+  -d "full=0"
+```
+
+## Convert to Template
+
+```bash
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/template"
+```
+
+## Snapshots
+
+```bash
+# List snapshots
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/snapshot" | jq
+
+# Create snapshot
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/snapshot" \
+  -d "snapname=before-upgrade" \
+  -d "description=Pre-upgrade snapshot" \
+  -d "vmstate=1"
+
+# Rollback
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/snapshot/{snapname}/rollback"
+
+# Delete snapshot
+curl -sk -X DELETE -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}/snapshot/{snapname}"
+```
+
+## Backups
+
+```bash
+# Start backup
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/vzdump" \
+  -d "vmid={vmid}" \
+  -d "storage=local" \
+  -d "mode=snapshot" \
+  -d "compress=zstd"
+
+# List backups
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/storage/local/content?content=backup" | jq
+
+# Restore
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu" \
+  -d "vmid=100" \
+  -d "archive=local:backup/vzdump-qemu-100-2026_01_15.vma.zst" \
+  -d "storage=local-lvm"
+```
+
+## Storage & Templates
+
+```bash
+# List storage
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/storage" | jq '.data[] | {storage, type, active, content}'
+
+# List templates
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/storage/local/content?content=vztmpl" | jq
+
+# List ISOs
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/storage/local/content?content=iso" | jq
+
+# Download template
+curl -sk -X POST -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/aplinfo" \
+  -d "storage=local" \
+  -d "template=debian-12-standard_12.2-1_amd64.tar.zst"
+```
+
+## Tasks
+
+```bash
+# List recent tasks
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/tasks?limit=10" | jq
+
+# Task status
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/tasks/{upid}/status" | jq
+
+# Task log
+curl -sk -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/tasks/{upid}/log" | jq
+```
+
+## Delete VM/Container
+
+```bash
+# Normal delete (must be stopped)
+curl -sk -X DELETE -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}"
+
+# Force purge (removes all related data)
+curl -sk -X DELETE -H "$AUTH" "$PVE_URL/api2/json/nodes/{node}/qemu/{vmid}?purge=1&destroy-unreferenced-disks=1"
+```
+
+## Quick Reference
+
+| Operation | Endpoint |
+|-----------|----------|
+| List nodes | `GET /nodes` |
+| List VMs | `GET /nodes/{node}/qemu` |
+| List LXC | `GET /nodes/{node}/lxc` |
+| Create VM | `POST /nodes/{node}/qemu` |
+| Create LXC | `POST /nodes/{node}/lxc` |
+| Clone | `POST /nodes/{node}/qemu/{vmid}/clone` |
+| Start | `POST /nodes/{node}/qemu/{vmid}/status/start` |
+| Stop | `POST /nodes/{node}/qemu/{vmid}/status/stop` |
+| Snapshot | `POST /nodes/{node}/qemu/{vmid}/snapshot` |
+| Delete | `DELETE /nodes/{node}/qemu/{vmid}` |
+| Next ID | `GET /cluster/nextid` |
+
+## Notes
+
+- Use `-k` flag for self-signed certificates
+- API tokens don't need CSRF tokens (unlike cookie auth)
+- Replace `{node}`, `{vmid}`, `{snapname}`, `{upid}` with actual values
+- All create/clone operations return a task UPID for tracking
+- Get next available VMID: `curl -sk -H "$AUTH" "$PVE_URL/api2/json/cluster/nextid" | jq '.data'`
diff --git a/.agent/tools/build-agent/add-skill.md b/.agent/tools/build-agent/add-skill.md
index b652c579a..d1e650a77 100644
--- a/.agent/tools/build-agent/add-skill.md
+++ b/.agent/tools/build-agent/add-skill.md
@@ -5,25 +5,32 @@ mode: subagent
 
 # Add Skill - External Skill Import System
 
-Import skills from external sources (GitHub repos, gists) and convert them to aidevops format while preserving knowledge and handling conflicts intelligently.
+Import skills from external sources (GitHub repos, ClawdHub registry) and convert them to aidevops format while preserving knowledge and handling conflicts intelligently.
 
 ## Quick Reference
 
 | Command | Purpose |
 |---------|---------|
-| `/add-skill <url>` | Import skill from GitHub |
+| `/add-skill <source>` | Import skill from GitHub or ClawdHub |
+| `/add-skill clawdhub:<slug>` | Import skill from ClawdHub registry |
 | `/add-skill list` | List imported skills |
 | `/add-skill check-updates` | Check for upstream changes |
 | `/add-skill remove <name>` | Remove imported skill |
 
-**Helper script:** `~/.aidevops/agents/scripts/add-skill-helper.sh`
+**Helper scripts:**
+- `~/.aidevops/agents/scripts/add-skill-helper.sh` — Main import logic
+- `~/.aidevops/agents/scripts/clawdhub-helper.sh` — ClawdHub browser-based fetcher
 
 ## Architecture
 
 ```text
-External Skill (GitHub)
+External Skill (GitHub or ClawdHub)
+        ↓
+  Detect Source (GitHub URL / clawdhub: prefix / clawdhub.com URL)
         ↓
 Fetch & Detect Format
+  ├── GitHub: git clone --depth 1
+  └── ClawdHub: Playwright browser extraction (SPA)
         ↓
 Check Conflicts with .agent/
         ↓
@@ -154,6 +161,8 @@ The helper script analyzes skill content to determine placement:
 |----------|----------|
 | deploy, vercel, coolify, docker, kubernetes | `tools/deployment/` |
 | cloudflare, dns, hosting, domain | `services/hosting/` |
+| proxmox, hypervisor, virtualization | `services/hosting/` |
+| calendar, caldav, ical, scheduling | `tools/productivity/` |
 | browser, playwright, puppeteer | `tools/browser/` |
 | seo, search, ranking, keyword | `seo/` |
 | git, github, gitlab | `tools/git/` |
@@ -232,6 +241,8 @@ create_skill_symlinks() {
 
 ## Popular Skills to Import
 
+### GitHub
+
 | Skill | Repository | Description |
 |-------|------------|-------------|
 | Cloudflare | `dmmulroy/cloudflare-skill` | 60+ Cloudflare products |
@@ -242,14 +253,25 @@ create_skill_symlinks() {
 
 Browse more at [skills.sh](https://skills.sh) leaderboard.
 
+### ClawdHub
+
+| Skill | Slug | Description |
+|-------|------|-------------|
+| CalDAV Calendar | `clawdhub:caldav-calendar` | CalDAV sync via vdirsyncer + khal |
+| Proxmox Full | `clawdhub:proxmox-full` | Complete Proxmox VE management |
+
+Browse more at [clawdhub.com](https://clawdhub.com) — vector search for agent skills.
+
 ## Troubleshooting
 
-### "Could not parse GitHub URL"
+### "Could not parse source URL"
 
-Ensure URL is in format:
-- `owner/repo`
-- `owner/repo/subpath`
+Ensure URL is in one of these formats:
+- `owner/repo` (GitHub)
+- `owner/repo/subpath` (GitHub)
 - `https://github.com/owner/repo`
+- `clawdhub:slug` (ClawdHub)
+- `https://clawdhub.com/owner/slug` (ClawdHub)
 
 ### "Failed to clone repository"
 
@@ -278,6 +300,7 @@ It does NOT check for semantic duplicates. Use `/add-skill list` to review.
 
 - `scripts/commands/add-skill.md` - Slash command definition
 - `scripts/add-skill-helper.sh` - Main implementation
+- `scripts/clawdhub-helper.sh` - ClawdHub browser-based fetcher
 - `scripts/skill-update-helper.sh` - Automated update checking
 - `scripts/generate-skills.sh` - SKILL.md generation for aidevops agents
 - `build-agent.md` - Agent design patterns
diff --git a/.agent/tools/productivity/caldav-calendar-skill.md b/.agent/tools/productivity/caldav-calendar-skill.md
new file mode 100644
index 000000000..34c03d6e1
--- /dev/null
+++ b/.agent/tools/productivity/caldav-calendar-skill.md
@@ -0,0 +1,63 @@
+---
+description: "Sync and query CalDAV calendars (iCloud, Google, Fastmail, Nextcloud, etc.) using vdirsyncer + khal"
+mode: subagent
+imported_from: clawdhub
+clawdhub_slug: "caldav-calendar"
+clawdhub_version: "1.0.1"
+---
+# CalDAV Calendar (vdirsyncer + khal)
+
+vdirsyncer syncs CalDAV calendars to local .ics files. khal reads and writes them.
+
+**Sync First** — Always sync before querying or after making changes: `vdirsyncer sync`
+
+## View Events
+
+```bash
+khal list                        # Today
+khal list today 7d               # Next 7 days
+khal list tomorrow               # Tomorrow
+khal list 2026-01-15 2026-01-20  # Date range
+khal list -a Work today          # Specific calendar
+```
+
+## Search
+
+```bash
+khal search "meeting"
+khal search "dentist" --format "{start-date} {title}"
+```
+
+## Create Events
+
+```bash
+khal new 2026-01-15 10:00 11:00 "Meeting title"
+khal new 2026-01-15 "All day event"
+khal new tomorrow 14:00 15:30 "Call" -a Work
+khal new 2026-01-15 10:00 11:00 "With notes" :: Description goes here
+```
+
+## Edit Events
+
+Interactive (requires TTY):
+
+- `s` — edit summary
+- `d` — description
+- `t` — datetime
+- `l` — location
+- `D` — delete
+- `n` — skip
+- `q` — quit
+
+## Output Formats
+
+Placeholders: `{title}`, `{description}`, `{start}`, `{end}`, `{start-date}`, `{start-time}`, `{end-date}`, `{end-time}`, `{location}`, `{calendar}`, `{uid}`
+
+## Caching
+
+Remove stale cache: `rm ~/.local/share/khal/khal.db`
+
+## Initial Setup
+
+1. Configure vdirsyncer (`~/.config/vdirsyncer/config`) — supports iCloud, Google, Fastmail, Nextcloud (see the sketch after this list)
+2. Configure khal (`~/.config/khal/config`)
+3. Run: `vdirsyncer discover && vdirsyncer sync`
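+
+A minimal starting point for steps 1–2 might look like this; the account name, server URL, paths, and credentials are placeholders to adjust for the actual provider:
+
+```bash
+# Hypothetical vdirsyncer config: sync one remote CalDAV account to local .ics files
+cat > ~/.config/vdirsyncer/config << 'EOF'
+[general]
+status_path = "~/.local/share/vdirsyncer/status/"
+
+[pair personal]
+a = "personal_local"
+b = "personal_remote"
+collections = ["from a", "from b"]
+
+[storage personal_local]
+type = "filesystem"
+path = "~/.local/share/calendars/"
+fileext = ".ics"
+
+[storage personal_remote]
+type = "caldav"
+url = "https://caldav.example.com/"
+username = "you@example.com"
+password = "app-specific-password"
+EOF
+
+# Matching khal config: discover every calendar vdirsyncer created
+cat > ~/.config/khal/config << 'EOF'
+[calendars]
+[[personal]]
+path = ~/.local/share/calendars/*
+type = discover
+EOF
+```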