Skip to content

Commit 539d64d

Browse files
Copilot and lpcox authored
Merge origin/main into copilot/fix-difc-proxy-404-error
Co-authored-by: lpcox <15877973+lpcox@users.noreply.github.com>
2 parents 271692b + 3906216 commit 539d64d

File tree

18 files changed

+459
-609
lines changed

18 files changed

+459
-609
lines changed

docs/CONFIGURATION.md

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -160,9 +160,10 @@ Run `./awmg --help` for full CLI options. Key flags:
160160
}
161161
```
162162

163-
- **`tools`** (optional): List of tool names intended to be exposed from this server
164-
- **Note**: This field is stored but not currently enforced at runtime; all tools from the backend are always exposed regardless of this value
165-
- Example: `["get_file_contents", "search_code"]`
163+
- **`tools`** (optional): List of tool names to allow for this server
164+
- Enforced at runtime: tools not in this list are hidden from `tools/list` responses and rejected when invoked via `tools/call` (for example, as an MCP/tool error)
165+
- Use `["*"]` (wildcard) to allow all tools (default behavior when field is omitted)
166+
- Example: `["get_file_contents", "search_code"]` (only these tools are accessible)
166167

167168
- **`registry`** (optional): Informational URI to the server's entry in an MCP registry
168169
- Used for documentation and discoverability purposes only; not used at runtime

guards/github-guard/rust-guard/src/labels/mod.rs

Lines changed: 33 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -4797,6 +4797,37 @@ mod tests {
47974797
}
47984798
}
47994799

4800+
#[test]
4801+
fn test_apply_tool_labels_granular_pr_update_tools_writer_integrity() {
4802+
let ctx = default_ctx();
4803+
let repo_id = "github/copilot";
4804+
let tool_args = json!({
4805+
"owner": "github",
4806+
"repo": "copilot",
4807+
"pullNumber": 42
4808+
});
4809+
4810+
for tool in &[
4811+
"update_pull_request_body",
4812+
"update_pull_request_draft_state",
4813+
"update_pull_request_state",
4814+
"update_pull_request_title",
4815+
] {
4816+
let (secrecy, integrity, _desc) = apply_tool_labels(
4817+
tool,
4818+
&tool_args,
4819+
repo_id,
4820+
vec![],
4821+
vec![],
4822+
String::new(),
4823+
&ctx,
4824+
);
4825+
4826+
assert_eq!(secrecy, vec![] as Vec<String>, "{} secrecy mismatch", tool);
4827+
assert_eq!(integrity, writer_integrity(repo_id, &ctx), "{} should have writer integrity", tool);
4828+
}
4829+
}
4830+
48004831
#[test]
48014832
fn test_apply_tool_labels_granular_pr_review_tools_writer_integrity() {
48024833
let ctx = default_ctx();
@@ -4826,8 +4857,8 @@ mod tests {
48264857
&ctx,
48274858
);
48284859

4829-
assert_eq!(secrecy, vec![] as Vec<String>, "{tool} secrecy mismatch");
4830-
assert_eq!(integrity, writer_integrity(repo_id, &ctx), "{tool} should have writer integrity");
4860+
assert_eq!(secrecy, vec![] as Vec<String>, "{} secrecy mismatch", tool);
4861+
assert_eq!(integrity, writer_integrity(repo_id, &ctx), "{} should have writer integrity", tool);
48314862
}
48324863
}
48334864

guards/github-guard/rust-guard/src/labels/tool_rules.rs

Lines changed: 13 additions & 31 deletions
Original file line number | Diff line number | Diff line change
@@ -533,49 +533,30 @@ pub fn apply_tool_labels(
533533
integrity = writer_integrity(repo_id, ctx);
534534
}
535535

536-
// === Granular issue update tools (repo-scoped writes) ===
536+
// === Granular repo-scoped write operations ===
537+
// Covers granular issue PATCH tools, sub-issue management, granular PR PATCH tools,
538+
// and PR review tools. All follow: S = S(repo), I = writer.
537539
"update_issue_assignees"
538540
| "update_issue_body"
539541
| "update_issue_labels"
540542
| "update_issue_milestone"
541543
| "update_issue_state"
542544
| "update_issue_title"
543-
| "update_issue_type" => {
544-
// Granular PATCH tools that modify individual issue fields.
545-
// S = S(repo); I = writer
546-
secrecy = apply_repo_visibility_secrecy(&owner, &repo, repo_id, secrecy, ctx);
547-
integrity = writer_integrity(repo_id, ctx);
548-
}
549-
550-
// === Sub-issue management tools (repo-scoped writes) ===
551-
"add_sub_issue" | "remove_sub_issue" | "reprioritize_sub_issue" => {
552-
// Sub-issue link creation, removal, and reordering.
553-
// S = S(repo); I = writer
554-
secrecy = apply_repo_visibility_secrecy(&owner, &repo, repo_id, secrecy, ctx);
555-
integrity = writer_integrity(repo_id, ctx);
556-
}
557-
558-
// === Granular PR update tools (repo-scoped read-write) ===
559-
"update_pull_request_body"
545+
| "update_issue_type"
546+
| "add_sub_issue"
547+
| "remove_sub_issue"
548+
| "reprioritize_sub_issue"
549+
| "update_pull_request_body"
560550
| "update_pull_request_draft_state"
561551
| "update_pull_request_state"
562-
| "update_pull_request_title" => {
563-
// Granular PATCH tools that modify individual PR fields.
564-
// S = S(repo); I = writer
565-
secrecy = apply_repo_visibility_secrecy(&owner, &repo, repo_id, secrecy, ctx);
566-
integrity = writer_integrity(repo_id, ctx);
567-
}
568-
569-
// === PR review tools (repo-scoped writes) ===
570-
"add_pull_request_review_comment"
552+
| "update_pull_request_title"
553+
| "add_pull_request_review_comment"
571554
| "create_pull_request_review"
572555
| "delete_pending_pull_request_review"
573556
| "request_pull_request_reviewers"
574557
| "resolve_review_thread"
575558
| "submit_pending_pull_request_review"
576559
| "unresolve_review_thread" => {
577-
// PR review creation, commenting, submission, and thread resolution.
578-
// S = S(repo); I = writer
579560
secrecy = apply_repo_visibility_secrecy(&owner, &repo, repo_id, secrecy, ctx);
580561
integrity = writer_integrity(repo_id, ctx);
581562
}
@@ -746,16 +727,17 @@ fn check_file_secrecy(
746727
ctx: &PolicyContext,
747728
) -> Vec<String> {
748729
let path_lower = path.to_lowercase();
730+
let segments: Vec<&str> = path_lower.split('/').collect();
749731

750732
// Check for sensitive file extensions/names
751733
for pattern in SENSITIVE_FILE_PATTERNS {
752-
if path_lower.ends_with(pattern) || path_lower.split('/').any(|seg| seg.starts_with(*pattern)) {
734+
if path_lower.ends_with(pattern) || segments.iter().any(|seg| seg.starts_with(*pattern)) {
753735
return policy_private_scope_label(owner, repo, repo_id, ctx);
754736
}
755737
}
756738

757739
// Get filename
758-
let filename = path_lower.rsplit('/').next().unwrap_or(&path_lower);
740+
let filename = segments.last().copied().unwrap_or(path_lower.as_str());
759741

760742
// Check for sensitive keywords in filename
761743
for keyword in SENSITIVE_FILE_KEYWORDS {

internal/cmd/proxy.go

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -189,7 +189,7 @@ func runProxy(cmd *cobra.Command, args []string) error {
189189
// Resolve GitHub API URL: flag → env vars → default
190190
apiURL := proxyAPIURL
191191
if apiURL == "" {
192-
apiURL = proxy.DeriveGitHubAPIURL()
192+
apiURL = envutil.DeriveGitHubAPIURL("")
193193
}
194194
if apiURL == "" {
195195
apiURL = proxy.DefaultGitHubAPIBase

internal/config/expand.go

Lines changed: 133 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,133 @@
1+
package config
2+
3+
import (
4+
"fmt"
5+
"os"
6+
"regexp"
7+
8+
"github.com/github/gh-aw-mcpg/internal/config/rules"
9+
)
10+
11+
// Variable expression pattern: ${VARIABLE_NAME}
12+
var varExprPattern = regexp.MustCompile(`\$\{([A-Za-z_][A-Za-z0-9_]*)\}`)
13+
14+
// expandVariablesCore is the shared implementation for variable expansion.
15+
// It works with byte slices and handles the core expansion logic, tracking undefined variables.
16+
// This eliminates code duplication between expandVariables and ExpandRawJSONVariables.
17+
// It returns the expanded bytes, a slice of undefined variable names, and an error
18+
// (currently always nil).
19+
func expandVariablesCore(data []byte, contextDesc string) ([]byte, []string, error) {
20+
logValidation.Printf("Expanding variables: context=%s", contextDesc)
21+
var undefinedVars []string
22+
23+
result := varExprPattern.ReplaceAllFunc(data, func(match []byte) []byte {
24+
// Extract variable name (remove ${ and })
25+
varName := string(match[2 : len(match)-1])
26+
27+
if envValue, exists := os.LookupEnv(varName); exists {
28+
logValidation.Printf("Expanded variable: %s (found in environment)", varName)
29+
return []byte(envValue)
30+
}
31+
32+
// Track undefined variable
33+
undefinedVars = append(undefinedVars, varName)
34+
logValidation.Printf("Undefined variable: %s", varName)
35+
return match // Keep original if undefined
36+
})
37+
38+
logValidation.Printf("Variable expansion completed: context=%s, undefined_count=%d", contextDesc, len(undefinedVars))
39+
return result, undefinedVars, nil
40+
}
41+
42+
// expandVariables expands variable expressions in a string.
43+
// value is the source string and jsonPath identifies the config location for errors.
44+
// It returns the expanded string and an error if any variable is undefined.
45+
func expandVariables(value, jsonPath string) (string, error) {
46+
result, undefinedVars, _ := expandVariablesCore([]byte(value), fmt.Sprintf("jsonPath=%s", jsonPath))
47+
48+
if len(undefinedVars) > 0 {
49+
logValidation.Printf("Variable expansion failed: undefined variables=%v", undefinedVars)
50+
return "", rules.UndefinedVariable(undefinedVars[0], jsonPath)
51+
}
52+
53+
return string(result), nil
54+
}
55+
56+
// ExpandRawJSONVariables expands all ${VAR} expressions in JSON data before schema validation.
57+
// This ensures the schema validates the expanded values, not the variable syntax.
58+
// It collects undefined variables and reports the first undefined variable as an error.
59+
func ExpandRawJSONVariables(data []byte) ([]byte, error) {
60+
result, undefinedVars, _ := expandVariablesCore(data, "raw JSON data")
61+
62+
if len(undefinedVars) > 0 {
63+
logValidation.Printf("Variable expansion failed: undefined variables=%v", undefinedVars)
64+
return nil, rules.UndefinedVariable(undefinedVars[0], "configuration")
65+
}
66+
67+
return result, nil
68+
}
69+
70+
// expandEnvVariables expands all variable expressions in an env map.
71+
// env is the map to expand and serverName is used for config-path error context.
72+
// It returns a new map with expanded values or an error if any variable is undefined.
73+
func expandEnvVariables(env map[string]string, serverName string) (map[string]string, error) {
74+
logValidation.Printf("Expanding env variables for server: %s, count=%d", serverName, len(env))
75+
result := make(map[string]string, len(env))
76+
77+
for key, value := range env {
78+
jsonPath := fmt.Sprintf("mcpServers.%s.env.%s", serverName, key)
79+
80+
expanded, err := expandVariables(value, jsonPath)
81+
if err != nil {
82+
return nil, err
83+
}
84+
85+
result[key] = expanded
86+
}
87+
88+
logValidation.Printf("Env variable expansion completed for server: %s", serverName)
89+
return result, nil
90+
}
91+
92+
// expandTracingVariables expands ${VAR} expressions in TracingConfig fields.
93+
// This is called for TOML-loaded configs before validation, mirroring the
94+
// stdin JSON path where ExpandRawJSONVariables handles expansion.
95+
func expandTracingVariables(cfg *TracingConfig) error {
96+
if cfg == nil {
97+
return nil
98+
}
99+
100+
if cfg.Endpoint != "" {
101+
expanded, err := expandVariables(cfg.Endpoint, "gateway.opentelemetry.endpoint")
102+
if err != nil {
103+
return err
104+
}
105+
cfg.Endpoint = expanded
106+
}
107+
108+
if cfg.TraceID != "" {
109+
expanded, err := expandVariables(cfg.TraceID, "gateway.opentelemetry.traceId")
110+
if err != nil {
111+
return err
112+
}
113+
cfg.TraceID = expanded
114+
}
115+
116+
if cfg.SpanID != "" {
117+
expanded, err := expandVariables(cfg.SpanID, "gateway.opentelemetry.spanId")
118+
if err != nil {
119+
return err
120+
}
121+
cfg.SpanID = expanded
122+
}
123+
124+
if cfg.Headers != "" {
125+
expanded, err := expandVariables(cfg.Headers, "gateway.opentelemetry.headers")
126+
if err != nil {
127+
return err
128+
}
129+
cfg.Headers = expanded
130+
}
131+
132+
return nil
133+
}

0 commit comments

Comments (0)