Commit 8691bc5

Merge pull request #65 from arm/custom_prompt
Add support for custom prompts
2 parents (334d648 + f166638), commit 8691bc5

7 files changed: +63 lines, -3 lines
pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "metis"
-version = "0.4.0"
+version = "0.5.0"
 description = "Metis is a command line tool for performing security code reviews using LLMs"
 readme = "README.md"
 license = { file = "LICENSE.md" }

src/metis/cli/commands.py

Lines changed: 1 addition & 0 deletions
@@ -42,6 +42,7 @@ def show_help():
     Options:
       --backend chroma|postgres  Vector backend to use (default: chroma).
       --output-file PATH         Save analysis results to this file.
+      --custom-prompt PATH       Custom prompt file (.md or .txt) to guide analysis.
       --project-schema SCHEMA    (Optional) Project identifier if postresql is used.
       --chroma-dir DIR           (Optional) Directory to store ChromaDB data (default: ./chromadb).
       --verbose                  (Optional) Shows detailed output in the terminal window.

src/metis/cli/entry.py

Lines changed: 24 additions & 0 deletions
@@ -14,6 +14,7 @@
 
 from metis.configuration import load_runtime_config
 from metis.engine import MetisEngine
+from metis.utils import read_file_content
 
 try:
     from metis.vector_store.pgvector_store import PGVectorStoreImpl
@@ -123,6 +124,11 @@ def main():
     )
     parser.add_argument("--log-file", type=str)
     parser.add_argument("--log-level", type=str, default="INFO")
+    parser.add_argument(
+        "--custom-prompt",
+        type=str,
+        help="Path to a custom prompt file (.md or .txt) used to guide analysis",
+    )
     parser.add_argument("--version", action="store_true", help="Show program version")
     parser.add_argument(
         "-v", "--verbose", action="store_true", help="Enable verbose output"
@@ -190,10 +196,28 @@ def main():
         args, runtime, embed_model_code, embed_model_docs
     )
 
+    # Resolve custom analysis prompt text
+    custom_prompt_text = None
+    if args.custom_prompt:
+        pf = Path(args.custom_prompt)
+        if pf.is_file() and pf.suffix.lower() in {".md", ".txt"}:
+            custom_prompt_text = read_file_content(str(pf))
+        else:
+            print_console(
+                f"[yellow]Warning:[/yellow] Ignoring --custom-prompt '{escape(str(pf))}'. It must exist and have .md or .txt extension.",
+                args.quiet,
+            )
+    if custom_prompt_text is None:
+        # Fallback to .metis.md in project root (codebase path)
+        metis_md = Path(args.codebase_path) / ".metis.md"
+        if metis_md.is_file():
+            custom_prompt_text = read_file_content(str(metis_md))
+
     engine = MetisEngine(
         codebase_path=args.codebase_path,
         llm_provider=llm_provider,
         vector_backend=vector_backend,
+        custom_prompt_text=custom_prompt_text,
         **runtime,
     )
 
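The resolution order in main() is: use --custom-prompt only if the path exists and carries a .md or .txt extension, warn and ignore it otherwise, then fall back to a .metis.md file at the codebase root. A minimal standalone sketch of that logic, using plain pathlib reads in place of the project's read_file_content helper (function name here is illustrative, not part of the change):

```python
from pathlib import Path

def resolve_custom_prompt(custom_prompt_arg, codebase_path):
    """Illustrative sketch of the fallback order used in entry.py."""
    if custom_prompt_arg:
        pf = Path(custom_prompt_arg)
        if pf.is_file() and pf.suffix.lower() in {".md", ".txt"}:
            return pf.read_text()  # entry.py uses read_file_content(str(pf))
        # Otherwise entry.py prints a warning and ignores the flag.
    # Fallback: a .metis.md file in the codebase root, if present.
    metis_md = Path(codebase_path) / ".metis.md"
    if metis_md.is_file():
        return metis_md.read_text()
    return None
```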

src/metis/engine/core.py

Lines changed: 16 additions & 0 deletions
@@ -34,6 +34,7 @@
     extract_content_from_diff,
     process_diff_file,
     prepare_nodes_iter,
+    apply_custom_guidance,
 )
 
 
@@ -69,7 +70,14 @@ def __init__(
             setattr(self, k, kwargs[k])
 
         self.llm_provider = llm_provider
+        # Optional user-provided guidance to be appended to system prompts
+        self.custom_prompt_text = kwargs.get("custom_prompt_text")
         self.plugin_config = load_plugin_config()
+
+        # Load precedence note from general prompts
+        self.custom_guidance_precedence = self.plugin_config.get(
+            "general_prompts", {}
+        ).get("custom_guidance_precedence", "")
         self.plugins = load_plugins(self.plugin_config)
 
         # Cache splitters and extension/plugin lookups
@@ -355,6 +363,11 @@ def review_patch(self, patch_file, validate=False):
             issue.get("issue", "") for issue in review_dict.get("reviews", [])
         )
         summary_prompt = language_prompts["snippet_security_summary"]
+        summary_prompt = apply_custom_guidance(
+            summary_prompt,
+            self.custom_prompt_text,
+            self.custom_guidance_precedence,
+        )
         changes_summary = summarize_changes(
             self.llm_provider, file_diff.path, issues, summary_prompt
         )
@@ -471,6 +484,9 @@ def _process_file_reviews(
 
         for chunk in chunks:
             system_prompt = f"{language_prompts[default_prompt_key]} \n {language_prompts['security_review_checks']} \n {report_prompt}"
+            system_prompt = apply_custom_guidance(
+                system_prompt, self.custom_prompt_text, self.custom_guidance_precedence
+            )
             review = perform_security_review(
                 self.llm_provider, file_path, chunk, combined_context, system_prompt
            )
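Both lookups added to __init__ degrade gracefully when nothing is configured: custom_prompt_text defaults to None if the keyword was never passed, and the precedence note defaults to an empty string if the plugin config has no general_prompts entry. A small sketch of just those defaults, using stand-in dicts rather than the real MetisEngine kwargs and load_plugin_config() result:

```python
# Stand-in values for illustration only.
kwargs = {}                              # --custom-prompt was not supplied
plugin_config = {"general_prompts": {}}  # no precedence note configured

custom_prompt_text = kwargs.get("custom_prompt_text")  # -> None
custom_guidance_precedence = plugin_config.get("general_prompts", {}).get(
    "custom_guidance_precedence", ""
)  # -> ""

# With these defaults apply_custom_guidance() leaves every prompt unchanged.
```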

src/metis/engine/helpers.py

Lines changed: 14 additions & 0 deletions
@@ -156,3 +156,17 @@ def prepare_nodes_iter(
             yield None
 
     return nodes_code, nodes_docs
+
+
+def apply_custom_guidance(base_prompt, custom_guidance, precedence_note):
+    """Prepend precedence note and custom guidance to a base prompt.
+
+    If custom_guidance is not set, returns base_prompt unchanged. The format is:
+    [precedence_note]\n\nCustom Guidance:\n{custom_guidance}\n\n{base_prompt}
+    """
+    if not custom_guidance:
+        return base_prompt
+    guidance_block = f"Custom Guidance:\n{custom_guidance.strip()}"
+    if precedence_note:
+        return f"{precedence_note.strip()}\n\n{guidance_block}\n\n{base_prompt}"
+    return f"{guidance_block}\n\n{base_prompt}"

src/metis/metis.yaml

Lines changed: 2 additions & 2 deletions
@@ -19,12 +19,12 @@ metis_engine:
 
   llm_provider:
     name: "openai"
-    model: "gpt-4.1"
+    model: "gpt-5"
     code_embedding_model: "text-embedding-3-large"
     docs_embedding_model: "text-embedding-3-large"
 
   query:
-    model: "gpt-4.1"
+    model: "gpt-5"
     similarity_top_k: 5
     response_mode: "tree_summarize"
     max_tokens: 5000

src/metis/plugins/plugins.yaml

Lines changed: 5 additions & 0 deletions
@@ -2,6 +2,11 @@ docs:
   supported_extensions: [".md", ".html", ".txt", ".pdf", ".rst"]
 
 general_prompts:
+  custom_guidance_precedence: |-
+    Instruction precedence: When 'Custom Guidance' conflicts with any default
+    instructions or checks below, follow 'Custom Guidance'. If guidance excludes
+    certain issue classes, omit them from analysis and do not report them unless
+    directly necessary to explain another issue.
   retrieve_context: |-
     You are a senior software engineer and your task is to explain what the following FILE does and what its purpose is.
     Include any data flows, function calls, or dependencies that could impact security.
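For reference, the new block reads back like any other plugin setting; a hedged sketch using PyYAML directly (the project itself goes through load_plugin_config, and the path below assumes the repository root as the working directory):

```python
import yaml  # PyYAML

with open("src/metis/plugins/plugins.yaml") as fh:
    plugin_config = yaml.safe_load(fh)

precedence_note = plugin_config["general_prompts"]["custom_guidance_precedence"]
print(precedence_note.splitlines()[0])
# Instruction precedence: When 'Custom Guidance' conflicts with any default
```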
