Merged (changes from all 17 commits)
326 changes: 317 additions & 9 deletions .github/workflows/smoke-claude.lock.yml

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions .github/workflows/smoke-claude.md
@@ -20,6 +20,7 @@ engine:
id: claude
max-turns: 100
strict: true
inlined-imports: true
imports:
- shared/mcp-pagination.md
- shared/gh.md
72 changes: 65 additions & 7 deletions pkg/parser/frontmatter_hash.go
@@ -17,6 +17,19 @@ import (

var frontmatterHashLog = logger.New("parser:frontmatter_hash")

// parseBoolFromFrontmatter extracts a boolean value from a frontmatter map.
// Returns false if the key is absent, the map is nil, or the value is not a bool.
func parseBoolFromFrontmatter(m map[string]any, key string) bool {
if m == nil {
return false
}
if v, ok := m[key]; ok {
b, _ := v.(bool)
return b
}
return false
}

// FileReader is a function type that reads file content
// This abstraction allows for different file reading strategies (disk, GitHub API, in-memory, etc.)
type FileReader func(filePath string) ([]byte, error)
@@ -261,8 +274,24 @@ func ComputeFrontmatterHashFromFile(filePath string, cache *ImportCache) (string
return ComputeFrontmatterHashFromFileWithReader(filePath, cache, DefaultFileReader)
}

// ComputeFrontmatterHashFromFileWithParsedFrontmatter computes the frontmatter hash using
// a pre-parsed frontmatter map. The parsedFrontmatter must not be nil; callers are responsible
// for parsing the frontmatter before calling this function.
func ComputeFrontmatterHashFromFileWithParsedFrontmatter(filePath string, parsedFrontmatter map[string]any, cache *ImportCache, fileReader FileReader) (string, error) {
frontmatterHashLog.Printf("Computing hash for file: %s", filePath)

// Read file content using the provided file reader
content, err := fileReader(filePath)
if err != nil {
return "", fmt.Errorf("failed to read file: %w", err)
}

return computeFrontmatterHashFromContent(string(content), parsedFrontmatter, filePath, cache, fileReader)
}

// ComputeFrontmatterHashFromFileWithReader computes the frontmatter hash for a workflow file
// using a custom file reader function (e.g., for GitHub API, in-memory file system, etc.)
// It parses the frontmatter once from the file content, then delegates to the core logic.
func ComputeFrontmatterHashFromFileWithReader(filePath string, cache *ImportCache, fileReader FileReader) (string, error) {
frontmatterHashLog.Printf("Computing hash for file: %s", filePath)

@@ -272,20 +301,44 @@ func ComputeFrontmatterHashFromFileWithReader(filePath string, cache *ImportCach
return "", fmt.Errorf("failed to read file: %w", err)
}

// Parse frontmatter once from content; treat inlined-imports as false if parsing fails
var parsedFrontmatter map[string]any
if parsed, parseErr := ExtractFrontmatterFromContent(string(content)); parseErr == nil {
parsedFrontmatter = parsed.Frontmatter
}

return computeFrontmatterHashFromContent(string(content), parsedFrontmatter, filePath, cache, fileReader)
}

// computeFrontmatterHashFromContent is the shared core that computes the hash given the
// already-read file content and pre-parsed frontmatter map (may be nil).
func computeFrontmatterHashFromContent(content string, parsedFrontmatter map[string]any, filePath string, cache *ImportCache, fileReader FileReader) (string, error) {
// Extract frontmatter and markdown as text (no YAML parsing)
frontmatterText, markdown, err := extractFrontmatterAndBodyText(string(content))
frontmatterText, markdown, err := extractFrontmatterAndBodyText(content)
if err != nil {
return "", fmt.Errorf("failed to extract frontmatter: %w", err)
}

// Get base directory for resolving imports
baseDir := filepath.Dir(filePath)

// Extract relevant template expressions from markdown body
relevantExpressions := extractRelevantTemplateExpressions(markdown)
// Detect inlined-imports from the pre-parsed frontmatter map.
// If nil (parsing failed or not provided), inlined-imports is treated as false.
inlinedImports := parseBoolFromFrontmatter(parsedFrontmatter, "inlined-imports")

// When inlined-imports is enabled, the entire markdown body is compiled into the lock
// file, so any change to the body must invalidate the hash. Include the full body text.
// Otherwise, only extract the relevant template expressions (env./vars. references).
var relevantExpressions []string
var fullBody string
if inlinedImports {
fullBody = normalizeFrontmatterText(markdown)
} else {
relevantExpressions = extractRelevantTemplateExpressions(markdown)
}

// Compute hash using text-based approach with custom file reader
return computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDir, cache, relevantExpressions, fileReader)
return computeFrontmatterHashTextBasedWithReader(frontmatterText, fullBody, baseDir, cache, relevantExpressions, fileReader)
}

// ComputeFrontmatterHashWithExpressions computes the hash including template expressions
@@ -517,7 +570,9 @@ func processImportsTextBased(frontmatterText, baseDir string, visited map[string
return importedFiles, importedFrontmatterTexts, nil
}

// computeFrontmatterHashTextBasedWithReader computes the hash using text-based approach with custom file reader
// computeFrontmatterHashTextBasedWithReader computes the hash using text-based approach with custom file reader.
// When markdown is non-empty, it is included as the full body text in the canonical data (used for
// inlined-imports mode where the entire body is compiled into the lock file).
func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDir string, cache *ImportCache, expressions []string, fileReader FileReader) (string, error) {
frontmatterHashLog.Print("Computing frontmatter hash using text-based approach")

Expand Down Expand Up @@ -553,8 +608,11 @@ func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDi
canonical["imported-frontmatters"] = strings.Join(normalizedTexts, "\n---\n")
}

// Add template expressions if present
if len(expressions) > 0 {
// When inlined-imports is enabled, include the full markdown body so any content
// change invalidates the hash. Otherwise, include only relevant template expressions.
if markdown != "" {
canonical["body-text"] = markdown
} else if len(expressions) > 0 {
canonical["template-expressions"] = expressions
}

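For reference, a minimal caller sketch of the new entry point. This is illustrative only: the module import path, package name, workflow path, and error handling are assumptions, while the function and helper names come from the diff above.

package example

import (
	"log"
	"path/filepath"

	// Module path assumed; adjust to the repository's actual module.
	"github.com/githubnext/gh-aw/pkg/parser"
)

// hashWorkflow computes the lock-file frontmatter hash when the caller already holds
// the parsed frontmatter map, so the hash pass does not need to re-parse the YAML.
func hashWorkflow(workflowPath string, rawFrontmatter map[string]any) string {
	cache := parser.NewImportCache(filepath.Dir(workflowPath))
	hash, err := parser.ComputeFrontmatterHashFromFileWithParsedFrontmatter(
		workflowPath, rawFrontmatter, cache, parser.DefaultFileReader)
	if err != nil {
		// Mirrors the compiler's behavior: treat a hash failure as non-fatal.
		log.Printf("warning: failed to compute frontmatter hash: %v", err)
		return ""
	}
	return hash
}
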
6 changes: 6 additions & 0 deletions pkg/parser/schemas/main_workflow_schema.json
@@ -120,6 +120,12 @@
]
]
},
"inlined-imports": {
"type": "boolean",
"default": false,
"description": "If true, inline all imports (including those without inputs) at compilation time in the generated lock.yml instead of using runtime-import macros. When enabled, the frontmatter hash covers the entire markdown body so any change to the content will invalidate the hash.",
"examples": [true, false]
},
"on": {
"description": "Workflow triggers that define when the agentic workflow should run. Supports standard GitHub Actions trigger events plus special command triggers for /commands (required)",
"examples": [
24 changes: 22 additions & 2 deletions pkg/workflow/compiler_orchestrator_workflow.go
@@ -118,6 +118,17 @@ func (c *Compiler) buildInitialWorkflowData(
) *WorkflowData {
orchestratorWorkflowLog.Print("Building initial workflow data")

inlinedImports := resolveInlinedImports(result.Frontmatter)

// When inlined-imports is true, agent file content is already inlined via ImportPaths → step 1b.
// Clear AgentFile/AgentImportSpec so engines don't read it from disk separately at runtime.
agentFile := importsResult.AgentFile
agentImportSpec := importsResult.AgentImportSpec
if inlinedImports {
agentFile = ""
agentImportSpec = ""
}

return &WorkflowData{
Name: toolsResult.workflowName,
FrontmatterName: toolsResult.frontmatterName,
@@ -138,8 +149,8 @@
MarkdownContent: toolsResult.markdownContent,
AI: engineSetup.engineSetting,
EngineConfig: engineSetup.engineConfig,
AgentFile: importsResult.AgentFile,
AgentImportSpec: importsResult.AgentImportSpec,
AgentFile: agentFile,
AgentImportSpec: agentImportSpec,
RepositoryImports: importsResult.RepositoryImports,
NetworkPermissions: engineSetup.networkPermissions,
SandboxConfig: applySandboxDefaults(engineSetup.sandboxConfig, engineSetup.engineConfig),
@@ -151,11 +162,20 @@
StrictMode: c.strictMode,
SecretMasking: toolsResult.secretMasking,
ParsedFrontmatter: toolsResult.parsedFrontmatter,
RawFrontmatter: result.Frontmatter,
HasExplicitGitHubTool: toolsResult.hasExplicitGitHubTool,
ActionMode: c.actionMode,
InlinedImports: inlinedImports,
}
}

// resolveInlinedImports returns true if inlined-imports is enabled.
// It reads the value directly from the raw (pre-parsed) frontmatter map, which is always
// populated regardless of whether ParseFrontmatterConfig succeeded.
func resolveInlinedImports(rawFrontmatter map[string]any) bool {
return ParseBoolFromConfig(rawFrontmatter, "inlined-imports", nil)
}

// extractYAMLSections extracts YAML configuration sections from frontmatter
func (c *Compiler) extractYAMLSections(frontmatter map[string]any, workflowData *WorkflowData) {
orchestratorWorkflowLog.Print("Extracting YAML sections from frontmatter")
2 changes: 2 additions & 0 deletions pkg/workflow/compiler_types.go
@@ -457,9 +457,11 @@ type WorkflowData struct {
StrictMode bool // strict mode for action pinning
SecretMasking *SecretMaskingConfig // secret masking configuration
ParsedFrontmatter *FrontmatterConfig // cached parsed frontmatter configuration (for performance optimization)
RawFrontmatter map[string]any // raw parsed frontmatter map (for passing to hash functions without re-parsing)
ActionPinWarnings map[string]bool // cache of already-warned action pin failures (key: "repo@version")
ActionMode ActionMode // action mode for workflow compilation (dev, release, script)
HasExplicitGitHubTool bool // true if tools.github was explicitly configured in frontmatter
InlinedImports bool // if true, inline all imports at compile time (from inlined-imports frontmatter field)
}

// BaseSafeOutputConfig holds common configuration fields for all safe output types
116 changes: 83 additions & 33 deletions pkg/workflow/compiler_yaml.go
@@ -3,6 +3,7 @@ package workflow
import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
@@ -107,6 +108,12 @@ func (c *Compiler) generateWorkflowHeader(yaml *strings.Builder, data *WorkflowD
}
}

// Add inlined-imports comment to indicate the field was used at compile time
if data.InlinedImports {
yaml.WriteString("#\n")
yaml.WriteString("# inlined-imports: true\n")
}

// Add lock metadata (schema version + frontmatter hash + stop time) as JSON
// Single-line format to minimize merge conflicts and be unaffected by LOC changes
if frontmatterHash != "" {
@@ -182,7 +189,7 @@ func (c *Compiler) generateYAML(data *WorkflowData, markdownPath string) (string
if markdownPath != "" {
baseDir := filepath.Dir(markdownPath)
cache := parser.NewImportCache(baseDir)
hash, err := parser.ComputeFrontmatterHashFromFile(markdownPath, cache)
hash, err := parser.ComputeFrontmatterHashFromFileWithParsedFrontmatter(markdownPath, data.RawFrontmatter, cache, parser.DefaultFileReader)
if err != nil {
compilerYamlLog.Printf("Warning: failed to compute frontmatter hash: %v", err)
// Continue without hash - non-fatal error
@@ -268,42 +275,52 @@ func (c *Compiler) generatePrompt(yaml *strings.Builder, data *WorkflowData) {
if data.ImportedMarkdown != "" {
compilerYamlLog.Printf("Processing imported markdown (%d bytes)", len(data.ImportedMarkdown))

// Clean and process imported markdown
cleanedImportedMarkdown := removeXMLComments(data.ImportedMarkdown)

// Substitute import inputs in imported content
// Clean, substitute, and post-process imported markdown
cleaned := removeXMLComments(data.ImportedMarkdown)
if len(data.ImportInputs) > 0 {
compilerYamlLog.Printf("Substituting %d import input values", len(data.ImportInputs))
cleanedImportedMarkdown = SubstituteImportInputs(cleanedImportedMarkdown, data.ImportInputs)
}

// Wrap GitHub expressions in template conditionals
cleanedImportedMarkdown = wrapExpressionsInTemplateConditionals(cleanedImportedMarkdown)

// Extract expressions from imported content
extractor := NewExpressionExtractor()
importedExprMappings, err := extractor.ExtractExpressions(cleanedImportedMarkdown)
if err == nil && len(importedExprMappings) > 0 {
cleanedImportedMarkdown = extractor.ReplaceExpressionsWithEnvVars(cleanedImportedMarkdown)
expressionMappings = importedExprMappings
cleaned = SubstituteImportInputs(cleaned, data.ImportInputs)
}

// Split imported content into chunks and add to user prompt
importedChunks := splitContentIntoChunks(cleanedImportedMarkdown)
userPromptChunks = append(userPromptChunks, importedChunks...)
compilerYamlLog.Printf("Inlined imported markdown with inputs in %d chunks", len(importedChunks))
chunks, exprMaps := processMarkdownBody(cleaned)
userPromptChunks = append(userPromptChunks, chunks...)
expressionMappings = exprMaps
compilerYamlLog.Printf("Inlined imported markdown with inputs in %d chunks", len(chunks))
}

// Step 1b: Generate runtime-import macros for imported markdown without inputs
// These imports don't need compile-time substitution, so they can be loaded at runtime
// Step 1b: For imports without inputs:
// - inlinedImports mode (inlined-imports: true frontmatter): read and inline content at compile time
// - normal mode: generate runtime-import macros (loaded at runtime)
if len(data.ImportPaths) > 0 {
compilerYamlLog.Printf("Generating runtime-import macros for %d imports without inputs", len(data.ImportPaths))
for _, importPath := range data.ImportPaths {
// Normalize to Unix paths (forward slashes) for cross-platform compatibility
importPath = filepath.ToSlash(importPath)
runtimeImportMacro := fmt.Sprintf("{{#runtime-import %s}}", importPath)
userPromptChunks = append(userPromptChunks, runtimeImportMacro)
compilerYamlLog.Printf("Added runtime-import macro for: %s", importPath)
if data.InlinedImports && c.markdownPath != "" {
// inlinedImports mode: read import file content from disk and embed directly
compilerYamlLog.Printf("Inlining %d imports without inputs at compile time", len(data.ImportPaths))
workspaceRoot := resolveWorkspaceRoot(c.markdownPath)
for _, importPath := range data.ImportPaths {
importPath = filepath.ToSlash(importPath)
rawContent, err := os.ReadFile(filepath.Join(workspaceRoot, importPath))
if err != nil {
// Fall back to runtime-import macro if file cannot be read
compilerYamlLog.Printf("Warning: failed to read import file %s (%v), falling back to runtime-import", importPath, err)
userPromptChunks = append(userPromptChunks, fmt.Sprintf("{{#runtime-import %s}}", importPath))
continue
}
importedBody, extractErr := parser.ExtractMarkdownContent(string(rawContent))
if extractErr != nil {
importedBody = string(rawContent)
}
chunks, exprMaps := processMarkdownBody(importedBody)
userPromptChunks = append(userPromptChunks, chunks...)
expressionMappings = append(expressionMappings, exprMaps...)
compilerYamlLog.Printf("Inlined import without inputs: %s", importPath)
}
} else {
// Normal mode: generate runtime-import macros (loaded at workflow runtime)
compilerYamlLog.Printf("Generating runtime-import macros for %d imports without inputs", len(data.ImportPaths))
for _, importPath := range data.ImportPaths {
importPath = filepath.ToSlash(importPath)
userPromptChunks = append(userPromptChunks, fmt.Sprintf("{{#runtime-import %s}}", importPath))
compilerYamlLog.Printf("Added runtime-import macro for: %s", importPath)
}
}
}

@@ -313,7 +330,7 @@
// available at compile time for the substitute placeholders step
// Use MainWorkflowMarkdown (not MarkdownContent) to avoid extracting from imported content
// Skip this step when inlinePrompt is true because expression extraction happens in Step 2
if !c.inlinePrompt && data.MainWorkflowMarkdown != "" {
if !c.inlinePrompt && !data.InlinedImports && data.MainWorkflowMarkdown != "" {
compilerYamlLog.Printf("Extracting expressions from main workflow markdown (%d bytes)", len(data.MainWorkflowMarkdown))

// Create a new extractor for main workflow markdown
@@ -327,7 +344,7 @@
}

// Step 2: Add main workflow markdown content to the prompt
if c.inlinePrompt {
if c.inlinePrompt || data.InlinedImports {
// Inline mode (Wasm/browser): embed the markdown content directly in the YAML
// since runtime-import macros cannot resolve without filesystem access
if data.MainWorkflowMarkdown != "" {
@@ -712,3 +729,36 @@ func (c *Compiler) generateOutputCollectionStep(yaml *strings.Builder, data *Wor
yaml.WriteString(" if-no-files-found: warn\n")

}

// processMarkdownBody applies the standard post-processing pipeline to a markdown body:
// XML comment removal, expression wrapping, expression extraction/substitution, and chunking.
// It returns the prompt chunks and expression mappings extracted from the content.
func processMarkdownBody(body string) ([]string, []*ExpressionMapping) {
body = removeXMLComments(body)
body = wrapExpressionsInTemplateConditionals(body)
extractor := NewExpressionExtractor()
exprMappings, err := extractor.ExtractExpressions(body)
if err == nil && len(exprMappings) > 0 {
body = extractor.ReplaceExpressionsWithEnvVars(body)
} else {
exprMappings = nil
}
return splitContentIntoChunks(body), exprMappings
}

// resolveWorkspaceRoot returns the workspace root directory given the path to a workflow markdown
// file. ImportPaths are relative to the workspace root (e.g. ".github/workflows/shared/foo.md"),
// so the workspace root is the directory that contains ".github/".
func resolveWorkspaceRoot(markdownPath string) string {
normalized := filepath.ToSlash(markdownPath)
if idx := strings.Index(normalized, "/.github/"); idx != -1 {
// Absolute or non-root-relative path: strip everything from "/.github/" onward.
return filepath.FromSlash(normalized[:idx])
}
if strings.HasPrefix(normalized, ".github/") {
// Path already starts at the workspace root.
return "."
}
// Fallback: use the directory containing the workflow file.
return filepath.Dir(markdownPath)
}
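
To make the path handling above concrete, here is a test-style sketch of the mappings resolveWorkspaceRoot is expected to produce. It would live in package workflow alongside the function; the absolute runner path is hypothetical and the expected outputs assume a Unix-style filesystem.

package workflow

import "testing"

// TestResolveWorkspaceRootSketch documents the intended input → output mappings.
func TestResolveWorkspaceRootSketch(t *testing.T) {
	cases := map[string]string{
		// Workspace-relative workflow path: the root is the current directory.
		".github/workflows/smoke-claude.md": ".",
		// Absolute path: strip everything from "/.github/" onward.
		"/home/runner/work/repo/.github/workflows/ci.md": "/home/runner/work/repo",
		// No ".github" segment: fall back to the directory containing the file.
		"docs/workflow.md": "docs",
	}
	for in, want := range cases {
		if got := resolveWorkspaceRoot(in); got != want {
			t.Errorf("resolveWorkspaceRoot(%q) = %q, want %q", in, got, want)
		}
	}
}
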
5 changes: 3 additions & 2 deletions pkg/workflow/frontmatter_types.go
@@ -144,8 +144,9 @@ type FrontmatterConfig struct {
Cache map[string]any `json:"cache,omitempty"`

// Import and inclusion
Imports any `json:"imports,omitempty"` // Can be string or array
Include any `json:"include,omitempty"` // Can be string or array
Imports any `json:"imports,omitempty"` // Can be string or array
Include any `json:"include,omitempty"` // Can be string or array
InlinedImports bool `json:"inlined-imports,omitempty"` // If true, inline all imports at compile time instead of using runtime-import macros

// Metadata
Metadata map[string]string `json:"metadata,omitempty"` // Custom metadata key-value pairs