All checks were successful
Build and Release / Lint (push) Successful in 5m21s
Build and Release / Create Release (push) Successful in 0s
Build and Release / Build Binaries (amd64, darwin, macos) (push) Successful in 4m21s
Build and Release / Build Binaries (arm64, darwin, macos) (push) Successful in 4m32s
Build and Release / Integration Tests (PostgreSQL) (push) Successful in 12m1s
Build and Release / Build Binary (linux/arm64) (push) Successful in 7m55s
Build and Release / Build Binaries (amd64, windows, windows-latest) (push) Successful in 9h7m31s
Build and Release / Unit Tests (push) Successful in 14m12s
Build and Release / Build Binaries (amd64, linux, linux-latest) (push) Successful in 12m51s
Implements three new MCP tools for landing page management: update_landing_stats for stat counters, update_landing_value_props for value proposition cards, and update_landing_cta for bottom call-to-action section. Each tool supports structured data with validation and integrates with existing config save flow.
2660 lines
75 KiB
Go
2660 lines
75 KiB
Go
// Copyright 2026 MarketAlly. All rights reserved.
|
|
// SPDX-License-Identifier: MIT
|
|
|
|
package v2
|
|
|
|
import (
|
|
"bytes"
|
|
"context"
|
|
"errors"
|
|
"fmt"
|
|
"io"
|
|
"net/http"
|
|
"net/url"
|
|
"strings"
|
|
"time"
|
|
|
|
actions_model "code.gitcaddy.com/server/v3/models/actions"
|
|
"code.gitcaddy.com/server/v3/models/db"
|
|
issue_model "code.gitcaddy.com/server/v3/models/issues"
|
|
packages_model "code.gitcaddy.com/server/v3/models/packages"
|
|
repo_model "code.gitcaddy.com/server/v3/models/repo"
|
|
secret_model "code.gitcaddy.com/server/v3/models/secret"
|
|
"code.gitcaddy.com/server/v3/models/unit"
|
|
user_model "code.gitcaddy.com/server/v3/models/user"
|
|
"code.gitcaddy.com/server/v3/modules/actions"
|
|
"code.gitcaddy.com/server/v3/modules/git"
|
|
"code.gitcaddy.com/server/v3/modules/json"
|
|
"code.gitcaddy.com/server/v3/modules/log"
|
|
"code.gitcaddy.com/server/v3/modules/optional"
|
|
"code.gitcaddy.com/server/v3/modules/setting"
|
|
api "code.gitcaddy.com/server/v3/modules/structs"
|
|
"code.gitcaddy.com/server/v3/modules/util"
|
|
actions_service "code.gitcaddy.com/server/v3/services/actions"
|
|
context_service "code.gitcaddy.com/server/v3/services/context"
|
|
notify_service "code.gitcaddy.com/server/v3/services/notify"
|
|
|
|
"github.com/nektos/act/pkg/jobparser"
|
|
"github.com/nektos/act/pkg/model"
|
|
"gopkg.in/yaml.v3"
|
|
"xorm.io/builder"
|
|
)
|
|
|
|
// MCP Protocol Types (JSON-RPC 2.0)
|
|
|
|
// MCPRequest is a single JSON-RPC 2.0 request envelope as received on the
// MCP endpoint. ID may be a string, number, or null per the JSON-RPC spec.
type MCPRequest struct {
	JSONRPC string          `json:"jsonrpc"` // must be exactly "2.0"
	ID      any             `json:"id"`
	Method  string          `json:"method"`
	Params  json.RawMessage `json:"params,omitempty"` // decoded lazily by the per-method handler
}

// MCPResponse is the JSON-RPC 2.0 response envelope. Exactly one of Result
// or Error is populated.
type MCPResponse struct {
	JSONRPC string    `json:"jsonrpc"`
	ID      any       `json:"id"` // echoes the request ID
	Result  any       `json:"result,omitempty"`
	Error   *MCPError `json:"error,omitempty"`
}

// MCPError is the JSON-RPC 2.0 error object (code, message, optional data).
type MCPError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Data    any    `json:"data,omitempty"` // extra detail, typically a string
}

// MCP Tool definitions

// MCPTool describes one callable tool advertised via "tools/list".
// InputSchema is a JSON Schema object describing the tool's arguments.
type MCPTool struct {
	Name        string         `json:"name"`
	Description string         `json:"description"`
	InputSchema map[string]any `json:"inputSchema"`
}

// MCPToolsListResult is the result payload of the "tools/list" method.
type MCPToolsListResult struct {
	Tools []MCPTool `json:"tools"`
}

// MCPToolCallParams are the params of a "tools/call" request: the tool name
// plus its arguments keyed by schema property name.
type MCPToolCallParams struct {
	Name      string         `json:"name"`
	Arguments map[string]any `json:"arguments"`
}

// MCPToolCallResult is the result payload of a "tools/call" response.
// IsError marks a tool-level failure (distinct from a protocol-level error).
type MCPToolCallResult struct {
	Content []MCPContent `json:"content"`
	IsError bool         `json:"isError,omitempty"`
}

// MCPContent is one content item in a tool-call result; only text content
// is produced by this server.
type MCPContent struct {
	Type string `json:"type"` // always "text" here
	Text string `json:"text"`
}

// MCPInitializeParams are the client-supplied params of the "initialize"
// handshake. They are currently accepted but not inspected by this server.
type MCPInitializeParams struct {
	ProtocolVersion string            `json:"protocolVersion"`
	Capabilities    map[string]any    `json:"capabilities"`
	ClientInfo      map[string]string `json:"clientInfo"`
}

// MCPInitializeResult is the server's reply to "initialize": the protocol
// version it speaks, its capabilities, and identifying server info.
type MCPInitializeResult struct {
	ProtocolVersion string            `json:"protocolVersion"`
	Capabilities    map[string]any    `json:"capabilities"`
	ServerInfo      map[string]string `json:"serverInfo"`
}
|
|
|
|
// Available MCP tools
|
|
// mcpTools is the static catalog of built-in tools advertised over the MCP
// "tools/list" method. Each entry's InputSchema is a JSON Schema object
// describing the arguments accepted by the matching case in handleToolsCall.
// AI- and landing-page-related tools live in separate slices (mcpAITools,
// mcpPagesTools) and are appended alongside this list in handleToolsList.
var mcpTools = []MCPTool{
	// --- Runner inspection ---
	{
		Name:        "list_runners",
		Description: "List all runners with their status, capabilities, and current workload",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner (optional, lists global runners if omitted)",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name (optional)",
				},
				"status": map[string]any{
					"type":        "string",
					"enum":        []string{"online", "offline", "all"},
					"description": "Filter by runner status",
				},
			},
		},
	},
	{
		Name:        "get_runner",
		Description: "Get detailed information about a specific runner including capabilities, disk space, and bandwidth",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"runner_id": map[string]any{
					"type":        "integer",
					"description": "The runner ID",
				},
			},
			"required": []string{"runner_id"},
		},
	},
	// --- Workflow runs, jobs, and logs ---
	{
		Name:        "list_workflow_runs",
		Description: "List workflow runs for a repository with status and timing information",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"status": map[string]any{
					"type":        "string",
					"enum":        []string{"pending", "running", "success", "failure", "cancelled", "all"},
					"description": "Filter by run status",
				},
				"limit": map[string]any{
					"type":        "integer",
					"description": "Maximum number of runs to return (default 20)",
				},
			},
			"required": []string{"owner", "repo"},
		},
	},
	{
		Name:        "get_workflow_run",
		Description: "Get detailed information about a specific workflow run including all jobs and their status",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID",
				},
			},
			"required": []string{"owner", "repo", "run_id"},
		},
	},
	{
		Name:        "get_job_logs",
		Description: "Get logs from a specific job in a workflow run. For failed jobs, automatically extracts error context.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"job_id": map[string]any{
					"type":        "integer",
					"description": "The job ID",
				},
				"level": map[string]any{
					"type":        "string",
					"enum":        []string{"errors", "warnings", "all"},
					"description": "Log filter level: 'errors' returns only error lines, 'warnings' returns errors and warnings, 'all' returns full logs (default: 'errors' for failed jobs, 'all' otherwise)",
				},
				// Kept for backward compatibility with older clients; superseded by "level".
				"errors_only": map[string]any{
					"type":        "boolean",
					"description": "Deprecated: use 'level' instead. If true, equivalent to level='errors'",
				},
				"context_lines": map[string]any{
					"type":        "integer",
					"description": "Number of lines before/after each matched line to include (default: 5)",
				},
			},
			"required": []string{"owner", "repo", "job_id"},
		},
	},
	{
		Name:        "cancel_workflow_run",
		Description: "Cancel a running workflow run and all its jobs. Only works on runs that are not yet completed.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID to cancel",
				},
			},
			"required": []string{"owner", "repo", "run_id"},
		},
	},
	// --- Releases ---
	{
		Name:        "list_releases",
		Description: "List releases for a repository",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"limit": map[string]any{
					"type":        "integer",
					"description": "Maximum number of releases to return (default 10)",
				},
			},
			"required": []string{"owner", "repo"},
		},
	},
	{
		Name:        "get_release",
		Description: "Get details of a specific release including all assets",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"tag": map[string]any{
					"type":        "string",
					"description": "Release tag (e.g., v1.0.0)",
				},
			},
			"required": []string{"owner", "repo", "tag"},
		},
	},
	// --- Secrets and packages ---
	{
		Name:        "list_secrets",
		Description: "List available secrets (names and descriptions only, not values) for workflows. Shows global, organization, and repository secrets.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository or organization owner (optional, shows global secrets if omitted)",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name (optional, shows org secrets if omitted)",
				},
			},
		},
	},
	{
		Name:        "list_packages",
		Description: "List packages for an owner or globally. Shows package name, type, version info, and visibility.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Package owner (user or organization). If omitted, lists global packages.",
				},
				"type": map[string]any{
					"type":        "string",
					"description": "Filter by package type (e.g., nuget, npm, container, generic)",
				},
				"limit": map[string]any{
					"type":        "integer",
					"description": "Maximum number of packages to return (default 50)",
				},
			},
		},
	},
	// --- Workflow control (rerun / trigger / approve) ---
	{
		Name:        "rerun_workflow",
		Description: "Rerun a completed workflow run or a specific failed job. Only works on runs that have finished.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID to rerun",
				},
				"job_id": map[string]any{
					"type":        "integer",
					"description": "Optional: specific job ID to rerun. If omitted, reruns all jobs.",
				},
			},
			"required": []string{"owner", "repo", "run_id"},
		},
	},
	{
		Name:        "trigger_workflow",
		Description: "Manually trigger a workflow_dispatch workflow with optional inputs.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"workflow": map[string]any{
					"type":        "string",
					"description": "Workflow filename (e.g., build.yml)",
				},
				"ref": map[string]any{
					"type":        "string",
					"description": "Git ref to run on (branch, tag, or SHA)",
				},
				"inputs": map[string]any{
					"type":        "object",
					"description": "Optional workflow inputs as key-value pairs",
				},
			},
			"required": []string{"owner", "repo", "workflow", "ref"},
		},
	},
	{
		Name:        "list_artifacts",
		Description: "List artifacts from a workflow run.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID",
				},
			},
			"required": []string{"owner", "repo", "run_id"},
		},
	},
	{
		Name:        "approve_workflow",
		Description: "Approve a workflow run that requires approval (e.g., from fork PRs).",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID to approve",
				},
			},
			"required": []string{"owner", "repo", "run_id"},
		},
	},
	// --- Workflow file inspection and validation ---
	{
		Name:        "list_workflows",
		Description: "List available workflow files in a repository.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"ref": map[string]any{
					"type":        "string",
					"description": "Git ref (branch/tag). Defaults to default branch.",
				},
			},
			"required": []string{"owner", "repo"},
		},
	},
	{
		Name:        "get_queue_depth",
		Description: "Get the number of waiting jobs per runner label. Useful for understanding runner capacity.",
		InputSchema: map[string]any{
			"type":       "object",
			"properties": map[string]any{}, // takes no arguments
		},
	},
	{
		Name:        "get_workflow_file",
		Description: "Get the content of a workflow YAML file.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"workflow": map[string]any{
					"type":        "string",
					"description": "Workflow filename (e.g., build.yml)",
				},
				"ref": map[string]any{
					"type":        "string",
					"description": "Git ref (branch/tag). Defaults to default branch.",
				},
			},
			"required": []string{"owner", "repo", "workflow"},
		},
	},
	{
		Name:        "validate_workflow",
		Description: "Validate a workflow YAML file for parse errors. Can validate by filename (from repo) or by raw YAML content. Returns validation status and any errors found.",
		InputSchema: map[string]any{
			"type": "object",
			// No "required" list: caller supplies either owner/repo/workflow
			// or raw "content"; the handler decides which mode applies.
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner (required if validating from repo)",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name (required if validating from repo)",
				},
				"workflow": map[string]any{
					"type":        "string",
					"description": "Workflow filename to validate from repo (e.g., build.yml)",
				},
				"ref": map[string]any{
					"type":        "string",
					"description": "Git ref (branch/tag). Defaults to default branch.",
				},
				"content": map[string]any{
					"type":        "string",
					"description": "Raw YAML content to validate directly (alternative to owner/repo/workflow)",
				},
			},
		},
	},
	{
		Name:        "get_artifact_download_url",
		Description: "Get the download URL for a workflow artifact.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"run_id": map[string]any{
					"type":        "integer",
					"description": "The workflow run ID",
				},
				"artifact_name": map[string]any{
					"type":        "string",
					"description": "Name of the artifact to download",
				},
			},
			"required": []string{"owner", "repo", "run_id", "artifact_name"},
		},
	},
	// --- Repository metadata ---
	{
		Name:        "get_package_defaults",
		Description: "Get preconfigured package defaults for an organization, with repo-specific URLs filled in. Returns authors, company, copyright, icon URL, and repository URLs for building valid package metadata.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Organization or user name",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name (optional, for generating repo-specific URLs)",
				},
			},
			"required": []string{"owner"},
		},
	},
	{
		Name:        "list_repos",
		Description: "List repositories for an owner (organization or user). Returns name, description, default branch, visibility, and URL.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner (organization or user name)",
				},
				"limit": map[string]any{
					"type":        "integer",
					"description": "Maximum number of repos to return (default 50)",
				},
			},
			"required": []string{"owner"},
		},
	},
	// --- Issues ---
	{
		Name:        "list_issues",
		Description: "List issues for a repository with pagination. Returns issue number, title, state, labels, poster, timestamps, and total count.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"state": map[string]any{
					"type":        "string",
					"description": "Filter by state: open, closed, or all (default: open)",
					"enum":        []string{"open", "closed", "all"},
				},
				"page": map[string]any{
					"type":        "integer",
					"description": "Page number (default 1)",
				},
				"limit": map[string]any{
					"type":        "integer",
					"description": "Results per page (default 20, max 100)",
				},
			},
			"required": []string{"owner", "repo"},
		},
	},
	{
		Name:        "get_issue",
		Description: "Get detailed information about a specific issue including body content and comments.",
		InputSchema: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"owner": map[string]any{
					"type":        "string",
					"description": "Repository owner",
				},
				"repo": map[string]any{
					"type":        "string",
					"description": "Repository name",
				},
				"number": map[string]any{
					"type":        "integer",
					"description": "Issue number",
				},
			},
			"required": []string{"owner", "repo", "number"},
		},
	},
}
|
|
|
|
// MCPHandler handles MCP protocol requests
|
|
// @Summary MCP Protocol Endpoint
|
|
// @Description Handles Model Context Protocol requests for AI tool integration
|
|
// @Tags mcp
|
|
// @Accept json
|
|
// @Produce json
|
|
// @Success 200 {object} MCPResponse
|
|
// @Router /mcp [post]
|
|
func MCPHandler(ctx *context_service.APIContext) {
|
|
body, err := io.ReadAll(ctx.Req.Body)
|
|
if err != nil {
|
|
sendMCPError(ctx, nil, -32700, "Parse error", err.Error())
|
|
return
|
|
}
|
|
|
|
var req MCPRequest
|
|
if err := json.Unmarshal(body, &req); err != nil {
|
|
sendMCPError(ctx, nil, -32700, "Parse error", err.Error())
|
|
return
|
|
}
|
|
|
|
if req.JSONRPC != "2.0" {
|
|
sendMCPError(ctx, req.ID, -32600, "Invalid Request", "jsonrpc must be 2.0")
|
|
return
|
|
}
|
|
|
|
log.Debug("MCP request: method=%s id=%v", req.Method, req.ID)
|
|
|
|
switch req.Method {
|
|
case "initialize":
|
|
handleInitialize(ctx, &req)
|
|
case "tools/list":
|
|
handleToolsList(ctx, &req)
|
|
case "tools/call":
|
|
handleToolsCall(ctx, &req)
|
|
case "ping":
|
|
sendMCPResult(ctx, req.ID, map[string]string{})
|
|
default:
|
|
sendMCPError(ctx, req.ID, -32601, "Method not found", "Unknown method: "+req.Method)
|
|
}
|
|
}
|
|
|
|
func handleInitialize(ctx *context_service.APIContext, req *MCPRequest) {
|
|
result := MCPInitializeResult{
|
|
ProtocolVersion: "2024-11-05",
|
|
Capabilities: map[string]any{
|
|
"tools": map[string]any{},
|
|
},
|
|
ServerInfo: map[string]string{
|
|
"name": "gitea-actions",
|
|
"version": setting.AppVer,
|
|
},
|
|
}
|
|
sendMCPResult(ctx, req.ID, result)
|
|
}
|
|
|
|
func handleToolsList(ctx *context_service.APIContext, req *MCPRequest) {
|
|
allTools := make([]MCPTool, 0, len(mcpTools)+len(mcpAITools)+len(mcpPagesTools))
|
|
allTools = append(allTools, mcpTools...)
|
|
allTools = append(allTools, mcpAITools...)
|
|
allTools = append(allTools, mcpPagesTools...)
|
|
result := MCPToolsListResult{Tools: allTools}
|
|
sendMCPResult(ctx, req.ID, result)
|
|
}
|
|
|
|
func handleToolsCall(ctx *context_service.APIContext, req *MCPRequest) {
|
|
var params MCPToolCallParams
|
|
if err := json.Unmarshal(req.Params, ¶ms); err != nil {
|
|
sendMCPError(ctx, req.ID, -32602, "Invalid params", err.Error())
|
|
return
|
|
}
|
|
|
|
var result any
|
|
var err error
|
|
|
|
switch params.Name {
|
|
case "list_runners":
|
|
result, err = toolListRunners(ctx, params.Arguments)
|
|
case "get_runner":
|
|
result, err = toolGetRunner(ctx, params.Arguments)
|
|
case "list_workflow_runs":
|
|
result, err = toolListWorkflowRuns(ctx, params.Arguments)
|
|
case "get_workflow_run":
|
|
result, err = toolGetWorkflowRun(ctx, params.Arguments)
|
|
case "get_job_logs":
|
|
result, err = toolGetJobLogs(ctx, params.Arguments)
|
|
case "cancel_workflow_run":
|
|
result, err = toolCancelWorkflowRun(ctx, params.Arguments)
|
|
case "list_releases":
|
|
result, err = toolListReleases(ctx, params.Arguments)
|
|
case "get_release":
|
|
result, err = toolGetRelease(ctx, params.Arguments)
|
|
case "list_secrets":
|
|
result, err = toolListSecrets(ctx, params.Arguments)
|
|
case "list_packages":
|
|
result, err = toolListPackages(ctx, params.Arguments)
|
|
case "rerun_workflow":
|
|
result, err = toolRerunWorkflow(ctx, params.Arguments)
|
|
case "trigger_workflow":
|
|
result, err = toolTriggerWorkflow(ctx, params.Arguments)
|
|
case "list_artifacts":
|
|
result, err = toolListArtifacts(ctx, params.Arguments)
|
|
case "approve_workflow":
|
|
result, err = toolApproveWorkflow(ctx, params.Arguments)
|
|
case "list_workflows":
|
|
result, err = toolListWorkflows(ctx, params.Arguments)
|
|
case "get_queue_depth":
|
|
result, err = toolGetQueueDepth(ctx, params.Arguments)
|
|
case "get_workflow_file":
|
|
result, err = toolGetWorkflowFile(ctx, params.Arguments)
|
|
case "validate_workflow":
|
|
result, err = toolValidateWorkflow(ctx, params.Arguments)
|
|
case "get_artifact_download_url":
|
|
result, err = toolGetArtifactDownloadURL(ctx, params.Arguments)
|
|
case "get_error_patterns":
|
|
result, err = toolGetErrorPatterns(ctx, params.Arguments)
|
|
case "report_error_solution":
|
|
result, err = toolReportErrorSolution(ctx, params.Arguments)
|
|
case "report_solution_success":
|
|
result, err = toolReportSolutionSuccess(ctx, params.Arguments)
|
|
case "get_compatibility_matrix":
|
|
result, err = toolGetCompatibilityMatrix(ctx, params.Arguments)
|
|
case "diagnose_job_failure":
|
|
result, err = toolDiagnoseJobFailure(ctx, params.Arguments)
|
|
case "get_package_defaults":
|
|
result, err = toolGetPackageDefaults(ctx, params.Arguments)
|
|
case "list_repos":
|
|
result, err = toolListRepos(ctx, params.Arguments)
|
|
case "list_issues":
|
|
result, err = toolListIssues(ctx, params.Arguments)
|
|
case "get_issue":
|
|
result, err = toolGetIssue(ctx, params.Arguments)
|
|
// Landing Pages tools
|
|
case "get_landing_config":
|
|
result, err = toolGetLandingConfig(ctx, params.Arguments)
|
|
case "list_landing_templates":
|
|
result, err = toolListLandingTemplates(ctx, params.Arguments)
|
|
case "enable_landing_page":
|
|
result, err = toolEnableLandingPage(ctx, params.Arguments)
|
|
case "update_landing_brand":
|
|
result, err = toolUpdateLandingBrand(ctx, params.Arguments)
|
|
case "update_landing_hero":
|
|
result, err = toolUpdateLandingHero(ctx, params.Arguments)
|
|
case "update_landing_pricing":
|
|
result, err = toolUpdateLandingPricing(ctx, params.Arguments)
|
|
case "update_landing_comparison":
|
|
result, err = toolUpdateLandingComparison(ctx, params.Arguments)
|
|
case "update_landing_features":
|
|
result, err = toolUpdateLandingFeatures(ctx, params.Arguments)
|
|
case "update_landing_social_proof":
|
|
result, err = toolUpdateLandingSocialProof(ctx, params.Arguments)
|
|
case "update_landing_seo":
|
|
result, err = toolUpdateLandingSEO(ctx, params.Arguments)
|
|
case "update_landing_theme":
|
|
result, err = toolUpdateLandingTheme(ctx, params.Arguments)
|
|
case "update_landing_stats":
|
|
result, err = toolUpdateLandingStats(ctx, params.Arguments)
|
|
case "update_landing_value_props":
|
|
result, err = toolUpdateLandingValueProps(ctx, params.Arguments)
|
|
case "update_landing_cta":
|
|
result, err = toolUpdateLandingCTA(ctx, params.Arguments)
|
|
default:
|
|
sendMCPError(ctx, req.ID, -32602, "Unknown tool", params.Name)
|
|
return
|
|
}
|
|
|
|
if err != nil {
|
|
sendMCPToolResult(ctx, req.ID, err.Error(), true)
|
|
return
|
|
}
|
|
|
|
// Convert result to JSON text
|
|
jsonBytes, _ := json.MarshalIndent(result, "", " ")
|
|
sendMCPToolResult(ctx, req.ID, string(jsonBytes), false)
|
|
}
|
|
|
|
func sendMCPResult(ctx *context_service.APIContext, id, result any) {
|
|
ctx.JSON(http.StatusOK, MCPResponse{
|
|
JSONRPC: "2.0",
|
|
ID: id,
|
|
Result: result,
|
|
})
|
|
}
|
|
|
|
func sendMCPError(ctx *context_service.APIContext, id any, code int, message, data string) {
|
|
ctx.JSON(http.StatusOK, MCPResponse{
|
|
JSONRPC: "2.0",
|
|
ID: id,
|
|
Error: &MCPError{
|
|
Code: code,
|
|
Message: message,
|
|
Data: data,
|
|
},
|
|
})
|
|
}
|
|
|
|
func sendMCPToolResult(ctx *context_service.APIContext, id any, text string, isError bool) {
|
|
ctx.JSON(http.StatusOK, MCPResponse{
|
|
JSONRPC: "2.0",
|
|
ID: id,
|
|
Result: MCPToolCallResult{
|
|
Content: []MCPContent{{Type: "text", Text: text}},
|
|
IsError: isError,
|
|
},
|
|
})
|
|
}
|
|
|
|
// Tool implementations
|
|
|
|
func toolListRunners(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
var runners actions_model.RunnerList
|
|
var err error
|
|
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
|
|
if owner != "" && repo != "" {
|
|
// Get repo-specific runners
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
runners, err = actions_model.GetRunnersOfRepo(ctx, repository.ID)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
} else {
|
|
// Get all runners (admin)
|
|
opts := actions_model.FindRunnerOptions{}
|
|
opts.PageSize = 100
|
|
runners, err = db.Find[actions_model.ActionRunner](ctx, opts)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
}
|
|
|
|
statusFilter, _ := args["status"].(string)
|
|
|
|
result := make([]map[string]any, 0, len(runners))
|
|
for _, runner := range runners {
|
|
isOnline := runner.IsOnline()
|
|
|
|
if statusFilter == "online" && !isOnline {
|
|
continue
|
|
}
|
|
if statusFilter == "offline" && isOnline {
|
|
continue
|
|
}
|
|
|
|
r := map[string]any{
|
|
"id": runner.ID,
|
|
"name": runner.Name,
|
|
"is_online": isOnline,
|
|
"status": runner.Status().String(),
|
|
"version": runner.Version,
|
|
"labels": runner.AgentLabels,
|
|
"last_online": runner.LastOnline.AsTime().Format(time.RFC3339),
|
|
}
|
|
|
|
// Parse capabilities if available
|
|
if runner.CapabilitiesJSON != "" {
|
|
var caps api.RunnerCapability
|
|
if json.Unmarshal([]byte(runner.CapabilitiesJSON), &caps) == nil {
|
|
r["capabilities"] = caps
|
|
}
|
|
}
|
|
|
|
result = append(result, r)
|
|
}
|
|
|
|
return map[string]any{
|
|
"runners": result,
|
|
"count": len(result),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetRunner(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
runnerIDFloat, ok := args["runner_id"].(float64)
|
|
if !ok {
|
|
return nil, errors.New("runner_id is required")
|
|
}
|
|
runnerID := int64(runnerIDFloat)
|
|
|
|
runner, err := actions_model.GetRunnerByID(ctx, runnerID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("runner not found: %d", runnerID)
|
|
}
|
|
|
|
result := map[string]any{
|
|
"id": runner.ID,
|
|
"name": runner.Name,
|
|
"is_online": runner.IsOnline(),
|
|
"status": runner.Status().String(),
|
|
"version": runner.Version,
|
|
"labels": runner.AgentLabels,
|
|
"last_online": runner.LastOnline.AsTime().Format(time.RFC3339),
|
|
"repo_id": runner.RepoID,
|
|
"owner_id": runner.OwnerID,
|
|
}
|
|
|
|
if runner.CapabilitiesJSON != "" {
|
|
var caps api.RunnerCapability
|
|
if json.Unmarshal([]byte(runner.CapabilitiesJSON), &caps) == nil {
|
|
result["capabilities"] = caps
|
|
}
|
|
}
|
|
|
|
return result, nil
|
|
}
|
|
|
|
func toolListWorkflowRuns(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
|
|
if owner == "" || repo == "" {
|
|
return nil, errors.New("owner and repo are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
limit := 20
|
|
if l, ok := args["limit"].(float64); ok {
|
|
limit = int(l)
|
|
}
|
|
|
|
opts := actions_model.FindRunOptions{
|
|
RepoID: repository.ID,
|
|
}
|
|
opts.PageSize = limit
|
|
|
|
runs, err := db.Find[actions_model.ActionRun](ctx, opts)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
statusFilter, _ := args["status"].(string)
|
|
|
|
result := make([]map[string]any, 0, len(runs))
|
|
for _, run := range runs {
|
|
status := run.Status.String()
|
|
|
|
if statusFilter != "" && statusFilter != "all" && !strings.EqualFold(status, statusFilter) {
|
|
continue
|
|
}
|
|
|
|
r := map[string]any{
|
|
"id": run.ID,
|
|
"title": run.Title,
|
|
"status": status,
|
|
"event": string(run.Event),
|
|
"workflow_id": run.WorkflowID,
|
|
"ref": run.Ref,
|
|
"commit_sha": run.CommitSHA,
|
|
"started": run.Started.AsTime().Format(time.RFC3339),
|
|
"stopped": run.Stopped.AsTime().Format(time.RFC3339),
|
|
}
|
|
|
|
result = append(result, r)
|
|
}
|
|
|
|
return map[string]any{
|
|
"runs": result,
|
|
"count": len(result),
|
|
"repo": fmt.Sprintf("%s/%s", owner, repo),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetWorkflowRun(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
runIDFloat, ok := args["run_id"].(float64)
|
|
|
|
if owner == "" || repo == "" || !ok {
|
|
return nil, errors.New("owner, repo, and run_id are required")
|
|
}
|
|
runID := int64(runIDFloat)
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("run not found: %d", runID)
|
|
}
|
|
|
|
// Get jobs for this run
|
|
jobs, err := actions_model.GetRunJobsByRunID(ctx, runID)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
jobResults := make([]map[string]any, 0, len(jobs))
|
|
for _, job := range jobs {
|
|
j := map[string]any{
|
|
"id": job.ID,
|
|
"name": job.Name,
|
|
"status": job.Status.String(),
|
|
"started": job.Started.AsTime().Format(time.RFC3339),
|
|
"stopped": job.Stopped.AsTime().Format(time.RFC3339),
|
|
"task_id": job.TaskID,
|
|
}
|
|
jobResults = append(jobResults, j)
|
|
}
|
|
|
|
return map[string]any{
|
|
"id": run.ID,
|
|
"title": run.Title,
|
|
"status": run.Status.String(),
|
|
"event": string(run.Event),
|
|
"workflow_id": run.WorkflowID,
|
|
"ref": run.Ref,
|
|
"commit_sha": run.CommitSHA,
|
|
"started": run.Started.AsTime().Format(time.RFC3339),
|
|
"stopped": run.Stopped.AsTime().Format(time.RFC3339),
|
|
"jobs": jobResults,
|
|
"job_count": len(jobResults),
|
|
}, nil
|
|
}
|
|
|
|
// toolGetJobLogs returns the step-by-step logs of a single Actions job,
// optionally filtered down to error/warning lines with surrounding context.
//
// Expected args: "owner" (string), "repo" (string), "job_id" (number), plus
// optionally "level" ("errors"|"warnings"|"all"), "errors_only" (bool,
// superseded by "level") and "context_lines" (number, default 5).
func toolGetJobLogs(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	jobIDFloat, ok := args["job_id"].(float64)

	if owner == "" || repo == "" || !ok {
		return nil, errors.New("owner, repo, and job_id are required")
	}
	jobID := int64(jobIDFloat)

	repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
	}

	job, err := actions_model.GetRunJobByID(ctx, jobID)
	if err != nil {
		return nil, fmt.Errorf("job not found: %d", jobID)
	}

	// Verify job belongs to this repo. The run itself is only fetched for
	// this ownership check and is otherwise unused.
	run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, job.RunID)
	if err != nil {
		return nil, errors.New("job not found in repository")
	}
	_ = run

	// Get the task for this job. A zero TaskID means no runner has picked
	// the job up yet, so there are no logs to read.
	if job.TaskID == 0 {
		return map[string]any{
			"job_id":   jobID,
			"job_name": job.Name,
			"status":   job.Status.String(),
			"message":  "Job has not started yet - no task assigned",
		}, nil
	}

	task, err := actions_model.GetTaskByID(ctx, job.TaskID)
	if err != nil {
		return nil, fmt.Errorf("task not found for job: %d", jobID)
	}

	// Check if logs are expired (rotated out of storage).
	if task.LogExpired {
		return map[string]any{
			"job_id":      jobID,
			"job_name":    job.Name,
			"status":      job.Status.String(),
			"log_expired": true,
			"message":     "Logs have expired",
		}, nil
	}

	// Determine log filter level.
	// Default to "errors" for failed jobs, "all" otherwise.
	logLevel := "all"
	if job.Status.String() == "failure" {
		logLevel = "errors"
	}

	// New "level" parameter takes priority; unknown values are ignored and
	// the default computed above stands.
	if val, ok := args["level"].(string); ok {
		switch val {
		case "errors", "warnings", "all":
			logLevel = val
		}
	}

	// Backward compat: errors_only overrides if level was not explicitly set.
	if _, hasLevel := args["level"].(string); !hasLevel {
		if val, ok := args["errors_only"].(bool); ok {
			if val {
				logLevel = "errors"
			} else {
				logLevel = "all"
			}
		}
	}

	// Number of context lines included around each matched line.
	contextLines := 5
	if val, ok := args["context_lines"].(float64); ok {
		contextLines = int(val)
	}

	filtering := logLevel != "all"

	// Get steps for this task.
	steps := actions.FullSteps(task)

	stepLogs := make([]map[string]any, 0, len(steps))
	for i, step := range steps {
		stepInfo := map[string]any{
			"step":     i,
			"name":     step.Name,
			"status":   step.Status.String(),
			"duration": step.Duration().String(),
		}

		// Read logs for this step. LogIndex addresses the step's first row
		// inside the task-wide log file via the task's offset table.
		if step.LogLength > 0 && step.LogIndex < int64(len(task.LogIndexes)) {
			offset := task.LogIndexes[step.LogIndex]
			logRows, err := actions.ReadLogs(ctx, task.LogInStorage, task.LogFilename, offset, step.LogLength)
			if err != nil {
				// Report the read failure inline rather than aborting the
				// whole response; other steps may still be readable.
				stepInfo["error"] = fmt.Sprintf("failed to read logs: %v", err)
			} else {
				allLines := make([]string, 0, len(logRows))
				for _, row := range logRows {
					allLines = append(allLines, row.Content)
				}

				if filtering {
					// Build pattern list based on level: the "warnings"
					// level is a superset of "errors".
					// NOTE(review): this appends to the package-level
					// logErrorPatterns slice; safe while the literal's cap
					// equals its len, but consider cloning first.
					patterns := logErrorPatterns
					if logLevel == "warnings" {
						patterns = append(patterns, logWarningPatterns...)
					}

					// Extract matching lines with context.
					matchedLines := extractLogLines(allLines, patterns, contextLines)
					if len(matchedLines) > 0 {
						stepInfo["lines"] = matchedLines
						stepInfo["line_count"] = len(matchedLines)
						stepInfo["filtered"] = true
						stepInfo["original_line_count"] = len(allLines)
					} else if step.Status.String() == "failure" {
						// For failed steps with no detected matches, include last N lines
						// so the caller still sees the tail of the output.
						lastN := min(50, len(allLines))
						stepInfo["lines"] = allLines[len(allLines)-lastN:]
						stepInfo["line_count"] = lastN
						stepInfo["filtered"] = true
						stepInfo["original_line_count"] = len(allLines)
						stepInfo["note"] = "No specific errors detected, showing last 50 lines"
					}
					// Skip successful steps when filtering
				} else {
					stepInfo["lines"] = allLines
					stepInfo["line_count"] = len(allLines)
				}
			}
		}

		// Only include steps that have content when filtering; failed steps
		// are always included even if their logs were unreadable.
		if !filtering || stepInfo["lines"] != nil || step.Status.String() == "failure" {
			stepLogs = append(stepLogs, stepInfo)
		}
	}

	return map[string]any{
		"job_id":      jobID,
		"job_name":    job.Name,
		"status":      job.Status.String(),
		"task_id":     task.ID,
		"log_expired": task.LogExpired,
		"steps":       stepLogs,
		"step_count":  len(stepLogs),
		"level":       logLevel,
	}, nil
}
|
|
|
|
// logErrorPatterns matches lines that indicate errors (strict).
|
|
// logErrorPatterns matches lines that indicate errors (strict).
// Matching is a case-insensitive substring search (see extractLogLines), so
// the mixed-case duplicates below are redundant but harmless.
var logErrorPatterns = []string{
	"error:", "error[", // generic diagnostics; "error[" covers coded forms like error[E0308]
	"failed", "FAILED",
	"fatal:", "FATAL:",
	"panic:", "PANIC:",
	"exception:",
	"cannot ",
	"undefined:",
	"permission denied",
	"exit code", "exit status",
	"--- FAIL:", // Go test failure marker
	"SIGILL", "SIGSEGV", "SIGABRT", "SIGKILL", // fatal signal names in crash output
	"Build FAILED", // MSBuild summary line
	"error MSB", "error CS", "error TS", // MSBuild / C# / TypeScript diagnostic codes
	"npm ERR!",
}
|
|
|
|
// logWarningPatterns matches lines that indicate warnings (used with "warnings" level).
|
|
// logWarningPatterns matches lines that indicate warnings (used with "warnings" level).
// These are appended to logErrorPatterns when the caller requests the
// "warnings" level, so warning output always includes error lines too.
var logWarningPatterns = []string{
	"warning:", "warn:",
	"warning MSB", "warning CS", "warning TS", // MSBuild / C# / TypeScript warning codes
	"deprecated",
	"not found",
	"go: ", // go module messages (downloads, version info)
}
|
|
|
|
// extractLogLines finds lines matching the given patterns and includes context around them.
|
|
// extractLogLines returns the lines that contain any of the given patterns
// (case-insensitive substring match), each surrounded by up to contextLines
// lines of context. Non-contiguous sections are separated by a
// "--- [skipped lines] ---" marker. It returns nil when nothing matches.
func extractLogLines(lines, patterns []string, contextLines int) []string {
	// A negative context would invert the window bounds below and silently
	// drop even the matching lines themselves, so clamp it to zero.
	if contextLines < 0 {
		contextLines = 0
	}

	// Pre-lowercase the patterns so each line only needs lowering once.
	lowerPatterns := make([]string, len(patterns))
	for i, p := range patterns {
		lowerPatterns[i] = strings.ToLower(p)
	}

	// Mark the indices of matching lines plus their context windows.
	matchIndices := make(map[int]bool)
	for i, line := range lines {
		lineLower := strings.ToLower(line)
		for _, pattern := range lowerPatterns {
			if strings.Contains(lineLower, pattern) {
				// Mark this line and surrounding context.
				for j := max(0, i-contextLines); j <= min(len(lines)-1, i+contextLines); j++ {
					matchIndices[j] = true
				}
				break
			}
		}
	}

	if len(matchIndices) == 0 {
		return nil
	}

	// Collect marked lines in order, adding a separator between
	// non-contiguous sections.
	result := make([]string, 0, len(matchIndices))
	lastIdx := -1
	for i, line := range lines {
		if !matchIndices[i] {
			continue
		}
		if lastIdx >= 0 && i > lastIdx+1 {
			result = append(result, "--- [skipped lines] ---")
		}
		result = append(result, line)
		lastIdx = i
	}

	return result
}
|
|
|
|
func toolCancelWorkflowRun(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
runIDFloat, ok := args["run_id"].(float64)
|
|
|
|
if owner == "" || repo == "" || !ok {
|
|
return nil, errors.New("owner, repo, and run_id are required")
|
|
}
|
|
runID := int64(runIDFloat)
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("run not found: %d", runID)
|
|
}
|
|
|
|
// Check if run is already done
|
|
if run.Status.IsDone() {
|
|
return map[string]any{
|
|
"run_id": runID,
|
|
"status": run.Status.String(),
|
|
"message": "Run is already completed and cannot be cancelled",
|
|
}, nil
|
|
}
|
|
|
|
// Get all jobs for this run
|
|
jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("failed to get jobs: %v", err)
|
|
}
|
|
|
|
// Cancel the jobs
|
|
var cancelledJobs []*actions_model.ActionRunJob
|
|
if err := db.WithTx(ctx, func(txCtx context.Context) error {
|
|
cancelled, err := actions_model.CancelJobs(txCtx, jobs)
|
|
if err != nil {
|
|
return fmt.Errorf("cancel jobs: %w", err)
|
|
}
|
|
cancelledJobs = append(cancelledJobs, cancelled...)
|
|
return nil
|
|
}); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Return result
|
|
cancelledJobIDs := make([]int64, 0, len(cancelledJobs))
|
|
for _, job := range cancelledJobs {
|
|
cancelledJobIDs = append(cancelledJobIDs, job.ID)
|
|
}
|
|
|
|
return map[string]any{
|
|
"run_id": runID,
|
|
"status": "cancelled",
|
|
"cancelled_jobs": cancelledJobIDs,
|
|
"cancelled_count": len(cancelledJobs),
|
|
"message": fmt.Sprintf("Successfully cancelled %d job(s)", len(cancelledJobs)),
|
|
}, nil
|
|
}
|
|
|
|
func toolListReleases(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
|
|
if owner == "" || repo == "" {
|
|
return nil, errors.New("owner and repo are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
limit := 10
|
|
if l, ok := args["limit"].(float64); ok {
|
|
limit = int(l)
|
|
}
|
|
|
|
opts := repo_model.FindReleasesOptions{
|
|
RepoID: repository.ID,
|
|
}
|
|
opts.PageSize = limit
|
|
|
|
releases, err := db.Find[repo_model.Release](ctx, opts)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
result := make([]map[string]any, 0, len(releases))
|
|
for _, release := range releases {
|
|
r := map[string]any{
|
|
"id": release.ID,
|
|
"tag_name": release.TagName,
|
|
"title": release.Title,
|
|
"is_draft": release.IsDraft,
|
|
"is_prerelease": release.IsPrerelease,
|
|
"created_at": release.CreatedUnix.AsTime().Format(time.RFC3339),
|
|
}
|
|
result = append(result, r)
|
|
}
|
|
|
|
return map[string]any{
|
|
"releases": result,
|
|
"count": len(result),
|
|
"repo": fmt.Sprintf("%s/%s", owner, repo),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetRelease(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
tag, _ := args["tag"].(string)
|
|
|
|
if owner == "" || repo == "" || tag == "" {
|
|
return nil, errors.New("owner, repo, and tag are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
release, err := repo_model.GetRelease(ctx, repository.ID, tag)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("release not found: %s", tag)
|
|
}
|
|
|
|
// Load attachments
|
|
if err := release.LoadAttributes(ctx); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
assets := make([]map[string]any, 0, len(release.Attachments))
|
|
for _, att := range release.Attachments {
|
|
assets = append(assets, map[string]any{
|
|
"id": att.ID,
|
|
"name": att.Name,
|
|
"size": att.Size,
|
|
"download_count": att.DownloadCount,
|
|
"download_url": fmt.Sprintf("%s/%s/%s/releases/download/%s/%s",
|
|
setting.AppURL, owner, repo, tag, att.Name),
|
|
})
|
|
}
|
|
|
|
return map[string]any{
|
|
"id": release.ID,
|
|
"tag_name": release.TagName,
|
|
"title": release.Title,
|
|
"body": release.Note,
|
|
"is_draft": release.IsDraft,
|
|
"is_prerelease": release.IsPrerelease,
|
|
"created_at": release.CreatedUnix.AsTime().Format(time.RFC3339),
|
|
"assets": assets,
|
|
"asset_count": len(assets),
|
|
}, nil
|
|
}
|
|
|
|
// toolListSecrets lists Actions secret names at up to three scopes: global
// (always), owner (when "owner" is given), and repository (when both
// "owner" and "repo" are given). Secret values are never returned.
//
// Fetch failures at a given scope are logged and leave that scope's list
// empty rather than failing the whole request; only an unknown owner or
// repository is treated as a hard error.
func toolListSecrets(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)

	// Pre-populate all three scopes so the response shape is stable even
	// when a scope is skipped or its fetch fails.
	result := map[string]any{
		"global_secrets": []map[string]any{},
		"owner_secrets":  []map[string]any{},
		"repo_secrets":   []map[string]any{},
	}

	// Always include global secrets (available to all workflows)
	globalSecrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{Global: true})
	if err != nil {
		log.Error("Failed to fetch global secrets: %v", err)
	} else {
		globalList := make([]map[string]any, 0, len(globalSecrets))
		for _, s := range globalSecrets {
			globalList = append(globalList, map[string]any{
				"name":        s.Name,
				"description": s.Description,
				"created_at":  s.CreatedUnix.AsTime().Format(time.RFC3339),
				"scope":       "global",
			})
		}
		result["global_secrets"] = globalList
	}

	// If owner is specified, get org/user secrets
	if owner != "" {
		ownerUser, err := user_model.GetUserByName(ctx, owner)
		if err != nil {
			return nil, fmt.Errorf("owner not found: %s", owner)
		}

		ownerSecrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{OwnerID: ownerUser.ID})
		if err != nil {
			log.Error("Failed to fetch owner secrets: %v", err)
		} else {
			ownerList := make([]map[string]any, 0, len(ownerSecrets))
			for _, s := range ownerSecrets {
				// Label the scope by owner kind for the caller's benefit.
				scope := "user"
				if ownerUser.IsOrganization() {
					scope = "organization"
				}
				ownerList = append(ownerList, map[string]any{
					"name":        s.Name,
					"description": s.Description,
					"created_at":  s.CreatedUnix.AsTime().Format(time.RFC3339),
					"scope":       scope,
					"owner":       owner,
				})
			}
			result["owner_secrets"] = ownerList
		}

		// If repo is also specified, get repo secrets
		if repo != "" {
			repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
			if err != nil {
				return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
			}

			repoSecrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{RepoID: repository.ID})
			if err != nil {
				log.Error("Failed to fetch repo secrets: %v", err)
			} else {
				repoList := make([]map[string]any, 0, len(repoSecrets))
				for _, s := range repoSecrets {
					repoList = append(repoList, map[string]any{
						"name":        s.Name,
						"description": s.Description,
						"created_at":  s.CreatedUnix.AsTime().Format(time.RFC3339),
						"scope":       "repository",
						"repo":        fmt.Sprintf("%s/%s", owner, repo),
					})
				}
				result["repo_secrets"] = repoList
			}
		}
	}

	// Add summary counts. The type assertions are safe because every value
	// stored under these keys above is a []map[string]any.
	result["total_count"] = len(result["global_secrets"].([]map[string]any)) +
		len(result["owner_secrets"].([]map[string]any)) +
		len(result["repo_secrets"].([]map[string]any))

	result["note"] = "Secret values are not shown for security. Only names and descriptions are available."

	return result, nil
}
|
|
|
|
// toolListPackages lists the latest versions of packages, either for a
// specific owner ("owner" arg) or — when no owner is given — only packages
// flagged as global.
//
// Expected args: optional "owner" (string), "type" (string package type),
// "limit" (number, default 50).
func toolListPackages(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	pkgType, _ := args["type"].(string)

	limit := 50
	if l, ok := args["limit"].(float64); ok {
		limit = int(l)
	}

	result := map[string]any{
		"packages": []map[string]any{},
	}

	// Exclude internal packages; page to at most `limit` latest versions.
	searchOpts := &packages_model.PackageSearchOptions{
		IsInternal: optional.Some(false),
		Paginator:  db.NewAbsoluteListOptions(0, limit),
	}

	if pkgType != "" {
		searchOpts.Type = packages_model.Type(pkgType)
	}

	var ownerUser *user_model.User

	if owner == "" {
		// List global packages only
		// Global packages don't have an owner filter - we need to filter by IsGlobal
		// For now, we'll search all and filter
		// NOTE(review): because the search is not pre-filtered, fewer than
		// `limit` global packages may be returned when non-global results
		// fill the page.
		searchOpts.OwnerID = 0 // This won't work directly, we need a different approach
	} else {
		// Get owner's packages
		var err error
		ownerUser, err = user_model.GetUserByName(ctx, owner)
		if err != nil {
			return nil, fmt.Errorf("owner not found: %s", owner)
		}
		searchOpts.OwnerID = ownerUser.ID
	}

	// Search for latest versions of packages
	versions, _, err := packages_model.SearchLatestVersions(ctx, searchOpts)
	if err != nil {
		return nil, fmt.Errorf("failed to search packages: %v", err)
	}

	packageList := make([]map[string]any, 0, len(versions))
	for _, pv := range versions {
		// Get the package info; versions whose package row cannot be loaded
		// are silently skipped.
		pkg, err := packages_model.GetPackageByID(ctx, pv.PackageID)
		if err != nil {
			continue
		}

		// If listing global packages (no owner), filter to only global ones
		if owner == "" && !pkg.IsGlobal {
			continue
		}

		pkgInfo := map[string]any{
			"id":             pkg.ID,
			"name":           pkg.Name,
			"type":           string(pkg.Type),
			"type_name":      pkg.Type.Name(),
			"latest_version": pv.Version,
			"is_private":     pkg.IsPrivate,
			"is_global":      pkg.IsGlobal,
			"created_at":     pv.CreatedUnix.AsTime().Format(time.RFC3339),
			"download_count": pv.DownloadCount,
		}

		// Add owner info if not global
		if ownerUser != nil {
			pkgInfo["owner"] = owner
			if ownerUser.IsOrganization() {
				pkgInfo["owner_type"] = "organization"
			} else {
				pkgInfo["owner_type"] = "user"
			}
		} else if pkg.IsGlobal {
			// "_" is the placeholder owner segment for global packages.
			pkgInfo["owner"] = "_"
			pkgInfo["owner_type"] = "global"
		}

		packageList = append(packageList, pkgInfo)
	}

	result["packages"] = packageList
	result["count"] = len(packageList)

	if owner == "" {
		result["scope"] = "global"
	} else {
		result["scope"] = owner
	}

	return result, nil
}
|
|
|
|
// toolRerunWorkflow restarts a completed workflow run. When args contains
// "job_id", only that job plus its dependent jobs are rerun; otherwise every
// job of the run is rerun.
//
// Expected args: "owner" (string), "repo" (string), "run_id" (number),
// optional "job_id" (number).
func toolRerunWorkflow(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	runIDFloat, ok := args["run_id"].(float64)

	if owner == "" || repo == "" || !ok {
		return nil, errors.New("owner, repo, and run_id are required")
	}
	runID := int64(runIDFloat)

	repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
	}

	run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
	if err != nil {
		return nil, fmt.Errorf("run not found: %d", runID)
	}

	// Check if run is done; only finished runs may be rerun.
	if !run.Status.IsDone() {
		return map[string]any{
			"run_id":  runID,
			"status":  run.Status.String(),
			"message": "Run is not yet completed and cannot be rerun",
		}, nil
	}

	// Check if workflow is disabled in the repo's Actions unit config.
	cfgUnit := repository.MustGetUnit(ctx, unit.TypeActions)
	cfg := cfgUnit.ActionsConfig()
	if cfg.IsWorkflowDisabled(run.WorkflowID) {
		return nil, errors.New("workflow is disabled")
	}

	// Reset run's start and stop time, keeping the previous duration so the
	// run's total runtime can still be reported across reruns.
	run.PreviousDuration = run.Duration()
	run.Started = 0
	run.Stopped = 0
	run.Status = actions_model.StatusWaiting

	vars, err := actions_model.GetVariablesOfRun(ctx, run)
	if err != nil {
		return nil, fmt.Errorf("get run variables: %w", err)
	}

	// Re-evaluate concurrency settings (if any) before restarting; this may
	// leave the run blocked behind another run in the same group.
	if run.RawConcurrency != "" {
		var rawConcurrency model.RawConcurrency
		if err := yaml.Unmarshal([]byte(run.RawConcurrency), &rawConcurrency); err != nil {
			return nil, fmt.Errorf("unmarshal raw concurrency: %w", err)
		}

		err = actions_service.EvaluateRunConcurrencyFillModel(ctx, run, &rawConcurrency, vars)
		if err != nil {
			return nil, fmt.Errorf("evaluate run concurrency: %w", err)
		}

		run.Status, err = actions_service.PrepareToStartRunWithConcurrency(ctx, run)
		if err != nil {
			return nil, fmt.Errorf("prepare run with concurrency: %w", err)
		}
	}

	if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "previous_duration", "status", "concurrency_group", "concurrency_cancel"); err != nil {
		return nil, fmt.Errorf("update run: %w", err)
	}

	// Attributes (Repo, TriggerUser) must be loaded before notifying.
	if err := run.LoadAttributes(ctx); err != nil {
		return nil, fmt.Errorf("load run attributes: %w", err)
	}
	notify_service.WorkflowRunStatusUpdate(ctx, run.Repo, run.TriggerUser, run)

	// Get jobs and rerun them
	jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
	if err != nil {
		return nil, fmt.Errorf("get jobs: %w", err)
	}

	isRunBlocked := run.Status == actions_model.StatusBlocked

	// Check if specific job_id was requested
	jobIDFloat, hasJobID := args["job_id"].(float64)
	if hasJobID {
		jobID := int64(jobIDFloat)
		// Find the specific job
		var targetJob *actions_model.ActionRunJob
		for _, j := range jobs {
			if j.ID == jobID {
				targetJob = j
				break
			}
		}
		if targetJob == nil {
			return nil, fmt.Errorf("job not found: %d", jobID)
		}

		// Rerun only the specified job and its dependents
		rerunJobs := actions_service.GetAllRerunJobs(targetJob, jobs)
		rerunCount := 0
		for _, j := range rerunJobs {
			// Dependent jobs start blocked until the target finishes; the
			// target itself is blocked only when the whole run is blocked.
			shouldBlockJob := j.ID != targetJob.ID || isRunBlocked
			if err := rerunJobMCP(ctx, j, shouldBlockJob, vars); err != nil {
				return nil, fmt.Errorf("rerun job %d: %w", j.ID, err)
			}
			rerunCount++
		}

		return map[string]any{
			"run_id":       runID,
			"status":       "rerun_started",
			"rerun_job_id": jobID,
			"jobs_rerun":   rerunCount,
			"message":      fmt.Sprintf("Started rerun of job %d and %d dependent jobs", jobID, rerunCount-1),
		}, nil
	}

	// Rerun all jobs
	rerunCount := 0
	for _, j := range jobs {
		// Jobs with dependencies start blocked so they wait for their needs.
		shouldBlockJob := len(j.Needs) > 0 || isRunBlocked
		if err := rerunJobMCP(ctx, j, shouldBlockJob, vars); err != nil {
			return nil, fmt.Errorf("rerun job %d: %w", j.ID, err)
		}
		rerunCount++
	}

	return map[string]any{
		"run_id":     runID,
		"status":     "rerun_started",
		"jobs_rerun": rerunCount,
		"message":    fmt.Sprintf("Started rerun of all %d jobs", rerunCount),
	}, nil
}
|
|
|
|
// rerunJobMCP is a helper function to rerun a single job
|
|
// rerunJobMCP is a helper function to rerun a single job. It resets the
// job's task assignment, timestamps and concurrency fields, re-evaluates job
// concurrency when applicable, persists the change, and emits commit-status
// and notification updates. Jobs that are not yet done are left untouched.
//
// shouldBlock forces the job back into StatusBlocked (e.g. because it has
// unmet dependencies); otherwise it restarts as StatusWaiting. vars are the
// run's variables used for concurrency expression evaluation.
func rerunJobMCP(ctx context.Context, job *actions_model.ActionRunJob, shouldBlock bool, vars map[string]string) error {
	// Remember the prior status: it is both the rerun precondition and the
	// optimistic-lock guard for the DB update below.
	status := job.Status
	if !status.IsDone() {
		return nil
	}

	// Reset the job to a clean pre-execution state.
	job.TaskID = 0
	job.Status = util.Iif(shouldBlock, actions_model.StatusBlocked, actions_model.StatusWaiting)
	job.Started = 0
	job.Stopped = 0
	job.ConcurrencyGroup = ""
	job.ConcurrencyCancel = false
	job.IsConcurrencyEvaluated = false

	// The run (job.Run) is needed for concurrency evaluation and the
	// notifications at the end.
	if err := job.LoadRun(ctx); err != nil {
		return err
	}

	// Re-evaluate job-level concurrency unless the job is being blocked
	// anyway; this may itself move the job into a blocked status.
	if job.RawConcurrency != "" && !shouldBlock {
		err := actions_service.EvaluateJobConcurrencyFillModel(ctx, job.Run, job, vars)
		if err != nil {
			return fmt.Errorf("evaluate job concurrency: %w", err)
		}

		var err2 error
		job.Status, err2 = actions_service.PrepareToStartJobWithConcurrency(ctx, job)
		if err2 != nil {
			return err2
		}
	}

	// Persist the reset, guarded on the status still being the one we saw
	// (builder.Eq{"status": status}) so a concurrent update is not clobbered.
	if err := db.WithTx(ctx, func(txCtx context.Context) error {
		updateCols := []string{"task_id", "status", "started", "stopped", "concurrency_group", "concurrency_cancel", "is_concurrency_evaluated"}
		_, err := actions_model.UpdateRunJob(txCtx, job, builder.Eq{"status": status}, updateCols...)
		return err
	}); err != nil {
		return err
	}

	actions_service.CreateCommitStatusForRunJobs(ctx, job.Run, job)
	notify_service.WorkflowJobStatusUpdate(ctx, job.Run.Repo, job.Run.TriggerUser, job, nil)

	return nil
}
|
|
|
|
func toolTriggerWorkflow(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
workflow, _ := args["workflow"].(string)
|
|
ref, _ := args["ref"].(string)
|
|
|
|
if owner == "" || repo == "" || workflow == "" || ref == "" {
|
|
return nil, errors.New("owner, repo, workflow, and ref are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
// Open git repo
|
|
gitRepo, err := git.OpenRepository(ctx, repository.RepoPath())
|
|
if err != nil {
|
|
return nil, fmt.Errorf("open repository: %w", err)
|
|
}
|
|
defer gitRepo.Close()
|
|
|
|
// Get the doer (authenticated user)
|
|
doer := ctx.Doer
|
|
if doer == nil {
|
|
return nil, errors.New("authentication required to trigger workflows")
|
|
}
|
|
|
|
// Process inputs
|
|
inputsArg, _ := args["inputs"].(map[string]any)
|
|
|
|
err = actions_service.DispatchActionWorkflow(ctx, doer, repository, gitRepo, workflow, ref, func(workflowDispatch *model.WorkflowDispatch, inputs map[string]any) error {
|
|
for name, config := range workflowDispatch.Inputs {
|
|
if val, ok := inputsArg[name]; ok {
|
|
inputs[name] = fmt.Sprintf("%v", val)
|
|
} else {
|
|
inputs[name] = config.Default
|
|
}
|
|
}
|
|
return nil
|
|
})
|
|
if err != nil {
|
|
return nil, fmt.Errorf("dispatch workflow: %w", err)
|
|
}
|
|
|
|
return map[string]any{
|
|
"workflow": workflow,
|
|
"ref": ref,
|
|
"status": "triggered",
|
|
"message": fmt.Sprintf("Successfully triggered workflow %s on ref %s", workflow, ref),
|
|
}, nil
|
|
}
|
|
|
|
func toolListArtifacts(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
runIDFloat, ok := args["run_id"].(float64)
|
|
|
|
if owner == "" || repo == "" || !ok {
|
|
return nil, errors.New("owner, repo, and run_id are required")
|
|
}
|
|
runID := int64(runIDFloat)
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("run not found: %d", runID)
|
|
}
|
|
|
|
artifacts, err := actions_model.ListUploadedArtifactsMeta(ctx, run.ID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("list artifacts: %w", err)
|
|
}
|
|
|
|
result := make([]map[string]any, 0, len(artifacts))
|
|
for _, art := range artifacts {
|
|
status := "completed"
|
|
if art.Status == actions_model.ArtifactStatusExpired {
|
|
status = "expired"
|
|
}
|
|
result = append(result, map[string]any{
|
|
"name": art.ArtifactName,
|
|
"size": art.FileSize,
|
|
"status": status,
|
|
"download_url": fmt.Sprintf("%s%s/%s/actions/runs/%d/artifacts/%s", setting.AppURL, owner, repo, run.Index, url.PathEscape(art.ArtifactName)),
|
|
})
|
|
}
|
|
|
|
return map[string]any{
|
|
"run_id": runID,
|
|
"artifacts": result,
|
|
"count": len(result),
|
|
}, nil
|
|
}
|
|
|
|
// toolApproveWorkflow approves a workflow run that is waiting for manual
// approval (run.NeedApproval) and moves its jobs towards execution.
//
// Expected args: "owner" (string), "repo" (string), "run_id" (number).
// Requires an authenticated user; the approver is recorded on the run.
func toolApproveWorkflow(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	runIDFloat, ok := args["run_id"].(float64)

	if owner == "" || repo == "" || !ok {
		return nil, errors.New("owner, repo, and run_id are required")
	}
	runID := int64(runIDFloat)

	repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
	}

	run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
	if err != nil {
		return nil, fmt.Errorf("run not found: %d", runID)
	}

	// Nothing to do for runs that never needed (or already got) approval.
	if !run.NeedApproval {
		return map[string]any{
			"run_id":  runID,
			"status":  run.Status.String(),
			"message": "Run does not require approval",
		}, nil
	}

	doer := ctx.Doer
	if doer == nil {
		return nil, errors.New("authentication required to approve workflows")
	}

	// Record the approval on the run itself.
	run.Repo = repository
	run.NeedApproval = false
	run.ApprovedBy = doer.ID

	if err := actions_model.UpdateRun(ctx, run, "need_approval", "approved_by"); err != nil {
		return nil, fmt.Errorf("update run: %w", err)
	}

	// Update job statuses
	jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
	if err != nil {
		return nil, fmt.Errorf("get jobs: %w", err)
	}

	updatedCount := 0
	for _, job := range jobs {
		var err error
		// Concurrency settings may keep a job blocked even after approval,
		// so each job's next status is computed individually.
		job.Status, err = actions_service.PrepareToStartJobWithConcurrency(ctx, job)
		if err != nil {
			return nil, fmt.Errorf("prepare job: %w", err)
		}
		if job.Status == actions_model.StatusWaiting {
			n, err := actions_model.UpdateRunJob(ctx, job, nil, "status")
			if err != nil {
				return nil, fmt.Errorf("update job: %w", err)
			}
			// n is the number of affected rows; count only jobs that were
			// actually transitioned.
			if n > 0 {
				updatedCount++
			}
		}
	}

	actions_service.CreateCommitStatusForRunJobs(ctx, run, jobs...)

	return map[string]any{
		"run_id":       runID,
		"status":       "approved",
		"approved_by":  doer.Name,
		"jobs_started": updatedCount,
		"message":      fmt.Sprintf("Successfully approved workflow run %d", runID),
	}, nil
}
|
|
|
|
func toolListWorkflows(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
|
|
if owner == "" || repo == "" {
|
|
return nil, errors.New("owner and repo are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
// Open git repo
|
|
gitRepo, err := git.OpenRepository(ctx, repository.RepoPath())
|
|
if err != nil {
|
|
return nil, fmt.Errorf("open repository: %w", err)
|
|
}
|
|
defer gitRepo.Close()
|
|
|
|
// Get ref (default to default branch)
|
|
ref, _ := args["ref"].(string)
|
|
if ref == "" {
|
|
ref = repository.DefaultBranch
|
|
}
|
|
|
|
// Get commit
|
|
commit, err := gitRepo.GetBranchCommit(ref)
|
|
if err != nil {
|
|
// Try as tag
|
|
commit, err = gitRepo.GetTagCommit(ref)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("ref not found: %s", ref)
|
|
}
|
|
}
|
|
|
|
// List workflows
|
|
workflowsPath, entries, err := actions.ListWorkflows(commit)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("list workflows: %w", err)
|
|
}
|
|
|
|
workflows := make([]map[string]any, 0, len(entries))
|
|
for _, entry := range entries {
|
|
wf := map[string]any{
|
|
"name": entry.Name(),
|
|
"path": workflowsPath + "/" + entry.Name(),
|
|
}
|
|
content, contentErr := actions.GetContentFromEntry(entry)
|
|
if contentErr != nil {
|
|
wf["valid"] = false
|
|
wf["errors"] = []string{contentErr.Error()}
|
|
} else {
|
|
validationErrors, _ := validateWorkflowContent(content)
|
|
wf["valid"] = len(validationErrors) == 0
|
|
if len(validationErrors) > 0 {
|
|
wf["errors"] = validationErrors
|
|
}
|
|
}
|
|
workflows = append(workflows, wf)
|
|
}
|
|
|
|
return map[string]any{
|
|
"ref": ref,
|
|
"path": workflowsPath,
|
|
"workflows": workflows,
|
|
"count": len(workflows),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetQueueDepth(ctx *context_service.APIContext, _ map[string]any) (any, error) {
|
|
queueDepth, err := actions_model.GetQueueDepthByLabels(ctx)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("get queue depth: %w", err)
|
|
}
|
|
|
|
labels := make([]map[string]any, 0, len(queueDepth))
|
|
totalJobs := int64(0)
|
|
totalStuck := int64(0)
|
|
|
|
for _, q := range queueDepth {
|
|
totalJobs += q.JobCount
|
|
totalStuck += q.StuckJobs
|
|
|
|
labelInfo := map[string]any{
|
|
"label": q.Label,
|
|
"job_count": q.JobCount,
|
|
"stuck_jobs": q.StuckJobs,
|
|
}
|
|
if q.OldestWait > 0 {
|
|
labelInfo["oldest_wait"] = q.OldestWait.AsTime().Format(time.RFC3339)
|
|
labelInfo["wait_duration"] = time.Since(q.OldestWait.AsTime()).String()
|
|
}
|
|
labels = append(labels, labelInfo)
|
|
}
|
|
|
|
return map[string]any{
|
|
"labels": labels,
|
|
"total_waiting": totalJobs,
|
|
"total_stuck": totalStuck,
|
|
"stuck_threshold": "30 minutes",
|
|
"label_count": len(labels),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetWorkflowFile(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
workflow, _ := args["workflow"].(string)
|
|
|
|
if owner == "" || repo == "" || workflow == "" {
|
|
return nil, errors.New("owner, repo, and workflow are required")
|
|
}
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
// Open git repo
|
|
gitRepo, err := git.OpenRepository(ctx, repository.RepoPath())
|
|
if err != nil {
|
|
return nil, fmt.Errorf("open repository: %w", err)
|
|
}
|
|
defer gitRepo.Close()
|
|
|
|
// Get ref (default to default branch)
|
|
ref, _ := args["ref"].(string)
|
|
if ref == "" {
|
|
ref = repository.DefaultBranch
|
|
}
|
|
|
|
// Get commit
|
|
commit, err := gitRepo.GetBranchCommit(ref)
|
|
if err != nil {
|
|
// Try as tag
|
|
commit, err = gitRepo.GetTagCommit(ref)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("ref not found: %s", ref)
|
|
}
|
|
}
|
|
|
|
// List workflows to find the entry
|
|
workflowsPath, entries, err := actions.ListWorkflows(commit)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("list workflows: %w", err)
|
|
}
|
|
|
|
var targetEntry *git.TreeEntry
|
|
for _, entry := range entries {
|
|
if entry.Name() == workflow {
|
|
targetEntry = entry
|
|
break
|
|
}
|
|
}
|
|
|
|
if targetEntry == nil {
|
|
return nil, fmt.Errorf("workflow not found: %s", workflow)
|
|
}
|
|
|
|
// Get content
|
|
content, err := actions.GetContentFromEntry(targetEntry)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("read workflow file: %w", err)
|
|
}
|
|
|
|
return map[string]any{
|
|
"workflow": workflow,
|
|
"ref": ref,
|
|
"path": workflowsPath + "/" + workflow,
|
|
"content": string(content),
|
|
}, nil
|
|
}
|
|
|
|
func toolGetArtifactDownloadURL(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
runIDFloat, ok := args["run_id"].(float64)
|
|
artifactName, _ := args["artifact_name"].(string)
|
|
|
|
if owner == "" || repo == "" || !ok || artifactName == "" {
|
|
return nil, errors.New("owner, repo, run_id, and artifact_name are required")
|
|
}
|
|
runID := int64(runIDFloat)
|
|
|
|
repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
|
|
}
|
|
|
|
run, err := actions_model.GetRunByRepoAndID(ctx, repository.ID, runID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("run not found: %d", runID)
|
|
}
|
|
|
|
// Check if artifact exists
|
|
artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
|
|
RunID: run.ID,
|
|
ArtifactName: artifactName,
|
|
})
|
|
if err != nil {
|
|
return nil, fmt.Errorf("find artifact: %w", err)
|
|
}
|
|
|
|
if len(artifacts) == 0 {
|
|
return nil, fmt.Errorf("artifact not found: %s", artifactName)
|
|
}
|
|
|
|
// Check status
|
|
art := artifacts[0]
|
|
if art.Status == actions_model.ArtifactStatusExpired {
|
|
return map[string]any{
|
|
"artifact_name": artifactName,
|
|
"status": "expired",
|
|
"message": "Artifact has expired and is no longer available for download",
|
|
}, nil
|
|
}
|
|
|
|
if art.Status != actions_model.ArtifactStatusUploadConfirmed {
|
|
return map[string]any{
|
|
"artifact_name": artifactName,
|
|
"status": "pending",
|
|
"message": "Artifact upload is not yet complete",
|
|
}, nil
|
|
}
|
|
|
|
downloadURL := fmt.Sprintf("%s%s/%s/actions/runs/%d/artifacts/%s",
|
|
setting.AppURL, owner, repo, run.Index, url.PathEscape(artifactName))
|
|
|
|
return map[string]any{
|
|
"artifact_name": artifactName,
|
|
"status": "available",
|
|
"size": art.FileSize,
|
|
"download_url": downloadURL,
|
|
}, nil
|
|
}
|
|
|
|
func toolGetPackageDefaults(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
repo, _ := args["repo"].(string)
|
|
|
|
if owner == "" {
|
|
return nil, errors.New("owner is required")
|
|
}
|
|
|
|
ownerUser, err := user_model.GetUserByName(ctx, owner)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("owner not found: %s", owner)
|
|
}
|
|
|
|
defaults, err := packages_model.GetPackageDefaultsByOwnerID(ctx, ownerUser.ID)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("failed to get package defaults: %w", err)
|
|
}
|
|
|
|
result := map[string]any{
|
|
"owner": owner,
|
|
"authors": defaults.Authors,
|
|
"company": defaults.Company,
|
|
"copyright": defaults.Copyright,
|
|
}
|
|
|
|
// Fill in repo-specific URLs if repo is provided
|
|
if repo != "" {
|
|
baseURL := setting.AppURL + owner + "/" + repo
|
|
result["repository_url"] = baseURL
|
|
result["package_project_url"] = baseURL
|
|
}
|
|
|
|
// Include icon URL if an icon has been uploaded
|
|
if defaults.IconPath != "" {
|
|
result["icon_url"] = setting.AppURL + owner + "/-/package-icon"
|
|
}
|
|
|
|
return result, nil
|
|
}
|
|
|
|
func toolListRepos(ctx *context_service.APIContext, args map[string]any) (any, error) {
|
|
owner, _ := args["owner"].(string)
|
|
if owner == "" {
|
|
return nil, errors.New("owner is required")
|
|
}
|
|
|
|
limit := 50
|
|
if l, ok := args["limit"].(float64); ok && l > 0 {
|
|
limit = min(int(l), 200)
|
|
}
|
|
|
|
ownerUser, err := user_model.GetUserByName(ctx, owner)
|
|
if err != nil {
|
|
return nil, fmt.Errorf("owner not found: %s", owner)
|
|
}
|
|
|
|
repos, count, err := repo_model.SearchRepository(ctx, repo_model.SearchRepoOptions{
|
|
ListOptions: db.ListOptions{
|
|
Page: 1,
|
|
PageSize: limit,
|
|
},
|
|
Actor: ctx.Doer,
|
|
OwnerID: ownerUser.ID,
|
|
Private: ctx.Doer != nil,
|
|
OrderBy: db.SearchOrderByAlphabetically,
|
|
Archived: optional.Some(false),
|
|
})
|
|
if err != nil {
|
|
return nil, fmt.Errorf("failed to search repositories: %w", err)
|
|
}
|
|
|
|
items := make([]map[string]any, 0, len(repos))
|
|
for _, repo := range repos {
|
|
items = append(items, map[string]any{
|
|
"name": repo.Name,
|
|
"full_name": repo.FullName(),
|
|
"description": repo.Description,
|
|
"default_branch": repo.DefaultBranch,
|
|
"private": repo.IsPrivate,
|
|
"fork": repo.IsFork,
|
|
"mirror": repo.IsMirror,
|
|
"archived": repo.IsArchived,
|
|
"url": setting.AppURL + repo.FullName(),
|
|
"stars": repo.NumStars,
|
|
"forks": repo.NumForks,
|
|
})
|
|
}
|
|
|
|
return map[string]any{
|
|
"owner": owner,
|
|
"total_count": count,
|
|
"repositories": items,
|
|
}, nil
|
|
}
|
|
|
|
// toolListIssues lists issues (pull requests excluded) for a repository with
// pagination and an optional state filter: "closed", "all", or the default
// of open issues only. Results are sorted newest-first.
func toolListIssues(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	if owner == "" || repo == "" {
		return nil, errors.New("owner and repo are required")
	}

	repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
	}

	// Pagination: page defaults to 1; limit defaults to 20, capped at 100.
	// JSON numbers arrive as float64, hence the type assertions.
	page := 1
	if p, ok := args["page"].(float64); ok && p > 0 {
		page = int(p)
	}

	limit := 20
	if l, ok := args["limit"].(float64); ok && l > 0 {
		limit = min(int(l), 100)
	}

	opts := &issue_model.IssuesOptions{
		Paginator: &db.ListOptions{
			Page:     page,
			PageSize: limit,
		},
		RepoIDs: []int64{repository.ID},
		// Exclude pull requests; this tool lists plain issues only.
		IsPull:   optional.Some(false),
		SortType: "newest",
	}

	// State filter: default is open issues; "closed" and "all" are opt-in.
	state, _ := args["state"].(string)
	switch state {
	case "closed":
		opts.IsClosed = optional.Some(true)
	case "all":
		// no filter
	default:
		opts.IsClosed = optional.Some(false)
	}

	issues, err := issue_model.Issues(ctx, opts)
	if err != nil {
		return nil, fmt.Errorf("failed to list issues: %w", err)
	}

	// Count with the same options so the total matches the page query's filter.
	totalCount, err := issue_model.CountIssues(ctx, opts)
	if err != nil {
		return nil, fmt.Errorf("failed to count issues: %w", err)
	}

	// Batch-load posters and labels to avoid per-issue queries.
	if err := issues.LoadPosters(ctx); err != nil {
		return nil, fmt.Errorf("failed to load posters: %w", err)
	}
	if err := issues.LoadLabels(ctx); err != nil {
		return nil, fmt.Errorf("failed to load labels: %w", err)
	}

	items := make([]map[string]any, 0, len(issues))
	for _, issue := range issues {
		labels := make([]string, 0, len(issue.Labels))
		for _, l := range issue.Labels {
			labels = append(labels, l.Name)
		}

		// Poster may be nil (e.g. deleted user); fall back to empty string.
		posterName := ""
		if issue.Poster != nil {
			posterName = issue.Poster.Name
		}

		issueState := "open"
		if issue.IsClosed {
			issueState = "closed"
		}

		items = append(items, map[string]any{
			"number":       issue.Index,
			"title":        issue.Title,
			"state":        issueState,
			"poster":       posterName,
			"labels":       labels,
			"num_comments": issue.NumComments,
			"created_at":   issue.CreatedUnix.AsTime().Format(time.RFC3339),
			"updated_at":   issue.UpdatedUnix.AsTime().Format(time.RFC3339),
			"url":          fmt.Sprintf("%s%s/%s/issues/%d", setting.AppURL, owner, repo, issue.Index),
		})
	}

	// Ceiling division for the total page count.
	totalPages := (int(totalCount) + limit - 1) / limit

	return map[string]any{
		"repository":  fmt.Sprintf("%s/%s", owner, repo),
		"total_count": totalCount,
		"page":        page,
		"page_size":   limit,
		"total_pages": totalPages,
		"issues":      items,
	}, nil
}
|
|
|
|
// toolGetIssue fetches a single issue by its per-repository index number,
// including its body, labels, poster, timestamps, and (when present) its
// plain comments.
func toolGetIssue(ctx *context_service.APIContext, args map[string]any) (any, error) {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	if owner == "" || repo == "" {
		return nil, errors.New("owner and repo are required")
	}

	// JSON numbers arrive as float64; validate before converting to int64.
	number, ok := args["number"].(float64)
	if !ok || number < 1 {
		return nil, errors.New("number is required and must be a positive integer")
	}

	repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
	if err != nil {
		return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
	}

	// Look up by the per-repo issue index, not the global issue ID.
	issue, err := issue_model.GetIssueByIndex(ctx, repository.ID, int64(number))
	if err != nil {
		return nil, fmt.Errorf("issue #%d not found", int64(number))
	}

	if err := issue.LoadPoster(ctx); err != nil {
		return nil, fmt.Errorf("failed to load poster: %w", err)
	}
	if err := issue.LoadLabels(ctx); err != nil {
		return nil, fmt.Errorf("failed to load labels: %w", err)
	}

	labels := make([]string, 0, len(issue.Labels))
	for _, l := range issue.Labels {
		labels = append(labels, l.Name)
	}

	// Poster may be nil (e.g. deleted user); fall back to empty string.
	posterName := ""
	if issue.Poster != nil {
		posterName = issue.Poster.Name
	}

	state := "open"
	if issue.IsClosed {
		state = "closed"
	}

	result := map[string]any{
		"number":       issue.Index,
		"title":        issue.Title,
		"state":        state,
		"body":         issue.Content,
		"poster":       posterName,
		"labels":       labels,
		"num_comments": issue.NumComments,
		"created_at":   issue.CreatedUnix.AsTime().Format(time.RFC3339),
		"updated_at":   issue.UpdatedUnix.AsTime().Format(time.RFC3339),
		"url":          fmt.Sprintf("%s%s/%s/issues/%d", setting.AppURL, owner, repo, issue.Index),
	}

	// closed_at is only meaningful for closed issues with a recorded time.
	if issue.IsClosed && !issue.ClosedUnix.IsZero() {
		result["closed_at"] = issue.ClosedUnix.AsTime().Format(time.RFC3339)
	}

	// Load plain comments only; other comment types (label changes,
	// reference events, etc.) are excluded.
	comments, err := issue_model.FindComments(ctx, &issue_model.FindCommentsOptions{
		IssueID: issue.ID,
		Type:    issue_model.CommentTypeComment,
	})
	if err != nil {
		return nil, fmt.Errorf("failed to load comments: %w", err)
	}

	if len(comments) > 0 {
		// Batch-load comment posters to avoid per-comment queries.
		if err := comments.LoadPosters(ctx); err != nil {
			return nil, fmt.Errorf("failed to load comment posters: %w", err)
		}

		commentItems := make([]map[string]any, 0, len(comments))
		for _, c := range comments {
			cPoster := ""
			if c.Poster != nil {
				cPoster = c.Poster.Name
			}
			commentItems = append(commentItems, map[string]any{
				"id":         c.ID,
				"body":       c.Content,
				"poster":     cPoster,
				"created_at": c.CreatedUnix.AsTime().Format(time.RFC3339),
			})
		}
		result["comments"] = commentItems
	}

	return result, nil
}
|
|
|
|
// validateWorkflowContent runs YAML and workflow-structural validation on raw workflow content.
|
|
// Returns a list of error strings (empty means valid) and a list of warning strings.
|
|
func validateWorkflowContent(content []byte) (errs, warnings []string) {
|
|
// 1. Basic YAML syntax check
|
|
var rawYAML map[string]any
|
|
if err := yaml.Unmarshal(content, &rawYAML); err != nil {
|
|
return []string{err.Error()}, nil
|
|
}
|
|
|
|
// 2. Workflow structural parse via nektos/act
|
|
workflow, err := model.ReadWorkflow(bytes.NewReader(content))
|
|
if err != nil {
|
|
return []string{err.Error()}, nil
|
|
}
|
|
|
|
// 3. Event/trigger parse
|
|
if _, err := jobparser.ParseRawOn(&workflow.RawOn); err != nil {
|
|
errs = append(errs, "invalid 'on' triggers: "+err.Error())
|
|
}
|
|
|
|
// 4. Job-level checks
|
|
for jobID, job := range workflow.Jobs {
|
|
if len(job.RunsOn()) == 0 {
|
|
warnings = append(warnings, fmt.Sprintf("job '%s' has no runs-on field", jobID))
|
|
}
|
|
if len(job.Steps) == 0 {
|
|
warnings = append(warnings, fmt.Sprintf("job '%s' has no steps", jobID))
|
|
}
|
|
}
|
|
|
|
return errs, warnings
|
|
}
|
|
|
|
// toolValidateWorkflow validates an actions workflow in one of two modes:
// raw YAML supplied via 'content', or a named workflow fetched from a
// repository ('owner'+'repo'+'workflow', optional 'ref'). It returns a
// validity flag plus structured error/warning lists and, when valid, a
// summary of the jobs and trigger events found.
func toolValidateWorkflow(ctx *context_service.APIContext, args map[string]any) (any, error) {
	contentStr, hasContent := args["content"].(string)
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	workflow, _ := args["workflow"].(string)

	var content []byte
	// workflowName/ref/path stay empty in raw-content mode; they are only
	// populated (and echoed back) when fetching from a repository.
	var workflowName, ref, path string

	if hasContent && contentStr != "" {
		// Mode 1: validate raw YAML content directly
		content = []byte(contentStr)
	} else if owner != "" && repo != "" && workflow != "" {
		// Mode 2: fetch from repo and validate
		repository, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner, repo)
		if err != nil {
			return nil, fmt.Errorf("repository not found: %s/%s", owner, repo)
		}

		gitRepo, err := git.OpenRepository(ctx, repository.RepoPath())
		if err != nil {
			return nil, fmt.Errorf("open repository: %w", err)
		}
		defer gitRepo.Close()

		// Assigns the outer ref (declared above) so it can be echoed in the result.
		ref, _ = args["ref"].(string)
		if ref == "" {
			ref = repository.DefaultBranch
		}

		// Resolve the ref as a branch first, then fall back to a tag.
		commit, err := gitRepo.GetBranchCommit(ref)
		if err != nil {
			commit, err = gitRepo.GetTagCommit(ref)
			if err != nil {
				return nil, fmt.Errorf("ref not found: %s", ref)
			}
		}

		workflowsPath, entries, err := actions.ListWorkflows(commit)
		if err != nil {
			return nil, fmt.Errorf("list workflows: %w", err)
		}

		// Locate the requested workflow among the tree entries.
		var targetEntry *git.TreeEntry
		for _, entry := range entries {
			if entry.Name() == workflow {
				targetEntry = entry
				break
			}
		}
		if targetEntry == nil {
			return nil, fmt.Errorf("workflow not found: %s", workflow)
		}

		content, err = actions.GetContentFromEntry(targetEntry)
		if err != nil {
			return nil, fmt.Errorf("read workflow file: %w", err)
		}

		workflowName = workflow
		path = workflowsPath + "/" + workflow
	} else {
		return nil, errors.New("provide either 'content' (raw YAML) or 'owner'+'repo'+'workflow' to validate from repository")
	}

	validationErrors, validationWarnings := validateWorkflowContent(content)

	result := map[string]any{
		"valid": len(validationErrors) == 0,
	}

	// Echo source details only when the workflow came from a repository.
	if workflowName != "" {
		result["workflow"] = workflowName
		result["ref"] = ref
		result["path"] = path
	}

	if len(validationErrors) > 0 {
		errorObjects := make([]map[string]any, 0, len(validationErrors))
		for _, e := range validationErrors {
			errorObjects = append(errorObjects, map[string]any{"message": e})
		}
		result["errors"] = errorObjects
	}

	if len(validationWarnings) > 0 {
		warningObjects := make([]map[string]any, 0, len(validationWarnings))
		for _, w := range validationWarnings {
			warningObjects = append(warningObjects, map[string]any{"message": w})
		}
		result["warnings"] = warningObjects
	}

	// If valid, extract summary info (job count and trigger event names).
	// Parse errors here are deliberately ignored: validation already passed,
	// so the summary is best-effort extra detail.
	if len(validationErrors) == 0 {
		wf, err := model.ReadWorkflow(bytes.NewReader(content))
		if err == nil {
			result["jobs_found"] = len(wf.Jobs)
			events, err := jobparser.ParseRawOn(&wf.RawOn)
			if err == nil {
				eventNames := make([]string, 0, len(events))
				for _, ev := range events {
					eventNames = append(eventNames, ev.Name)
				}
				result["events_found"] = eventNames
			}
		}
	}

	return result, nil
}
|