45 changes: 40 additions & 5 deletions README.md
@@ -256,17 +256,52 @@ gosec -exclude-generated ./...
```

### Auto fixing vulnerabilities

gosec can suggest fixes based on AI recommendations. It calls an AI API to receive a fix suggestion for each security finding.

You can enable this feature by providing the following command line arguments:
- `ai-api-provider`: the name of the AI API provider, currently only `gemini` is supported.
- `ai-api-key` or set the environment variable `GOSEC_AI_API_KEY`: the key to access the AI API,
For gemini, you can create an API key following [these instructions](https://ai.google.dev/gemini-api/docs/api-key).
- `ai-endpoint`: the endpoint of the AI provider; this is an optional argument.

- `ai-api-provider`: the name of the AI API provider. Supported providers:
- **Gemini**: `gemini-2.5-pro`, `gemini-2.5-flash`, `gemini-2.5-flash-lite`, `gemini-2.0-flash`, `gemini-2.0-flash-lite` (default)
- **Claude**: `claude-sonnet-4-0` (default), `claude-opus-4-0`, `claude-opus-4-1`, `claude-sonnet-3-7`
- **OpenAI**: `gpt-4o` (default), `gpt-4o-mini`
- **Custom OpenAI-compatible**: Any custom model name (requires `ai-base-url`)
- `ai-api-key` or set the environment variable `GOSEC_AI_API_KEY`: the key to access the AI API
- For Gemini, you can create an API key following [these instructions](https://ai.google.dev/gemini-api/docs/api-key)
- For Claude, get your API key from [Anthropic Console](https://console.anthropic.com/)
- For OpenAI, get your API key from [OpenAI Platform](https://platform.openai.com/api-keys)
- `ai-base-url`: (optional) custom base URL for OpenAI-compatible APIs (e.g., Azure OpenAI, LocalAI, Ollama)
- `ai-skip-ssl`: (optional) skip SSL certificate verification for AI API (useful for self-signed certificates)

**Examples:**

```bash
gosec -ai-api-provider="gemini" -ai-api-key="your_key" ./...
# Using Gemini
gosec -ai-api-provider="gemini-2.0-flash" -ai-api-key="your_key" ./...

# Using Claude
gosec -ai-api-provider="claude-sonnet-4-0" -ai-api-key="your_key" ./...

# Using OpenAI
gosec -ai-api-provider="gpt-4o" -ai-api-key="your_key" ./...

# Using Azure OpenAI
gosec -ai-api-provider="gpt-4o" \
-ai-api-key="your_azure_key" \
-ai-base-url="https://your-resource.openai.azure.com/openai/deployments/your-deployment" \
./...

# Using local Ollama with custom model
gosec -ai-api-provider="llama3.2" \
-ai-base-url="http://localhost:11434/v1" \
./...

# Using an API with a self-signed certificate
gosec -ai-api-provider="custom-model" \
-ai-api-key="your_key" \
-ai-base-url="https://internal-api.company.com/v1" \
-ai-skip-ssl \
./...
```

### Annotating code
27 changes: 21 additions & 6 deletions autofix/ai.go
@@ -13,7 +13,8 @@ import (
const (
AIProviderFlagHelp = `AI API provider to generate auto fixes to issues. Valid options are:
- gemini-2.5-pro, gemini-2.5-flash, gemini-2.5-flash-lite, gemini-2.0-flash, gemini-2.0-flash-lite (gemini, default);
- claude-sonnet-4-0 (claude, default), claude-opus-4-0, claude-opus-4-1, claude-sonnet-3-7`
- claude-sonnet-4-0 (claude, default), claude-opus-4-0, claude-opus-4-1, claude-sonnet-3-7;
- gpt-4o (openai, default), gpt-4o-mini;
- any other model name is treated as a custom OpenAI-compatible model (use ai-base-url to set the endpoint)`

AIPrompt = `Provide a brief explanation and a solution to fix this security issue
in Go programming language: %q.
@@ -27,21 +28,35 @@ type GenAIClient interface {
}

// GenerateSolution generates a solution for the given issues using the specified AI provider
func GenerateSolution(model, aiAPIKey string, issues []*issue.Issue) (err error) {
func GenerateSolution(model, aiAPIKey, baseURL string, skipSSL bool, issues []*issue.Issue) (err error) {
var client GenAIClient

switch {
case strings.HasPrefix(model, "claude"):
client, err = NewClaudeClient(model, aiAPIKey)
case strings.HasPrefix(model, "gemini"):
client, err = NewGeminiClient(model, aiAPIKey)
case strings.HasPrefix(model, "gpt"):
config := OpenAIConfig{
Model: model,
APIKey: aiAPIKey,
BaseURL: baseURL,
SkipSSL: skipSSL,
}
client, err = NewOpenAIClient(config)
default:
// Default to OpenAI-compatible API for custom models
config := OpenAIConfig{
Model: model,
APIKey: aiAPIKey,
BaseURL: baseURL,
SkipSSL: skipSSL,
}
client, err = NewOpenAIClient(config)
}

switch {
case err != nil:
if err != nil {
return fmt.Errorf("initializing AI client: %w", err)
case client == nil:
return fmt.Errorf("unsupported AI backend: %s", model)
}

return generateSolution(client, issues)
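Because any model name that does not match the `claude` or `gemini` prefixes now falls through to the OpenAI-compatible client, callers can target self-hosted endpoints without provider-specific code. Below is a minimal sketch of invoking the updated `GenerateSolution` signature against a local Ollama endpoint; the import paths and the `llama3.2` model name are assumptions for illustration.

```go
package main

import (
    "log"

    "github.com/securego/gosec/v2/autofix" // assumed import path
    "github.com/securego/gosec/v2/issue"   // assumed import path
)

func main() {
    // Issues would normally come from a gosec analysis run;
    // an empty slice keeps this sketch self-contained.
    var issues []*issue.Issue

    // "llama3.2" matches neither the claude nor the gemini prefix, so the
    // dispatch above routes it to the OpenAI-compatible client using the
    // custom base URL (a hypothetical local Ollama endpoint).
    if err := autofix.GenerateSolution("llama3.2", "", "http://localhost:11434/v1", false, issues); err != nil {
        log.Print(err)
    }
}
```

This mirrors the Ollama command-line example in the README section above.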
7 changes: 5 additions & 2 deletions autofix/ai_test.go
@@ -81,8 +81,11 @@ func TestGenerateSolution_UnsupportedProvider(t *testing.T) {
}

// Act
err := GenerateSolution("unsupported-provider", "test-api-key", issues)
// Note: with the default OpenAI-compatible fallback, a custom model name successfully creates an OpenAI client;
// the test then fails when the API request is made without a valid API key or base URL
err := GenerateSolution("custom-model", "", "", false, issues)

// Assert
require.EqualError(t, err, "unsupported AI backend: unsupported-provider")
// Expect an error from the AI API call
require.Error(t, err)
}
120 changes: 120 additions & 0 deletions autofix/openai.go
@@ -0,0 +1,120 @@
package autofix

import (
"context"
"crypto/tls"
"errors"
"fmt"
"net/http"

"github.com/openai/openai-go/v3"
"github.com/openai/openai-go/v3/option"
)

const (
ModelGPT4o = openai.ChatModelGPT4o
ModelGPT4oMini = openai.ChatModelGPT4oMini
DefaultOpenAIBaseURL = "https://api.openai.com/v1"
)

var _ GenAIClient = (*openaiWrapper)(nil)

type OpenAIConfig struct {
Model string
APIKey string
BaseURL string
MaxTokens int
Temperature float64
SkipSSL bool
}

type openaiWrapper struct {
client openai.Client
model openai.ChatModel
maxTokens int
temperature float64
}

func NewOpenAIClient(config OpenAIConfig) (GenAIClient, error) {
var options []option.RequestOption

if config.APIKey != "" {
options = append(options, option.WithAPIKey(config.APIKey))
}

// Support custom base URL (for OpenAI-compatible APIs)
if config.BaseURL != "" {
options = append(options, option.WithBaseURL(config.BaseURL))
}

// Support skip SSL verification
if config.SkipSSL {
// Create custom HTTP client with InsecureSkipVerify
httpClient := &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: true, // #nosec G402
},
},
}
options = append(options, option.WithHTTPClient(httpClient))
}

openaiModel := parseOpenAIModel(config.Model)

// Set default values
maxTokens := config.MaxTokens
if maxTokens == 0 {
maxTokens = 1024
}

temperature := config.Temperature
if temperature == 0 {
temperature = 0.7
}

return &openaiWrapper{
client: openai.NewClient(options...),
model: openaiModel,
maxTokens: maxTokens,
temperature: temperature,
}, nil
}

func (o *openaiWrapper) GenerateSolution(ctx context.Context, prompt string) (string, error) {
params := openai.ChatCompletionNewParams{
Model: o.model,
Messages: []openai.ChatCompletionMessageParamUnion{
openai.UserMessage(prompt),
},
}

// Note: maxTokens and temperature are stored on the wrapper but are not yet applied to the request parameters
resp, err := o.client.Chat.Completions.New(ctx, params)
if err != nil {
return "", fmt.Errorf("generating autofix: %w", err)
}

if resp == nil || len(resp.Choices) == 0 {
return "", errors.New("no autofix returned by openai")
}

content := resp.Choices[0].Message.Content
if content == "" {
return "", errors.New("nothing found in the first autofix returned by openai")
}

return content, nil
}

func parseOpenAIModel(model string) openai.ChatModel {
switch model {
case "gpt-4o":
return openai.ChatModelGPT4o
case "gpt-4o-mini":
return openai.ChatModelGPT4oMini
default:
return openai.ChatModel(model)
}
}
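For reference, here is a minimal sketch of using the new client directly, assuming the `github.com/securego/gosec/v2/autofix` import path, that `GenAIClient` exposes the `GenerateSolution(ctx, prompt)` method implemented above, and a hypothetical self-hosted OpenAI-compatible endpoint with a self-signed certificate:

```go
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/securego/gosec/v2/autofix" // assumed import path
)

func main() {
    // Hypothetical OpenAI-compatible endpoint behind a self-signed certificate.
    client, err := autofix.NewOpenAIClient(autofix.OpenAIConfig{
        Model:   "custom-model",
        APIKey:  "your_key",
        BaseURL: "https://internal-api.company.com/v1",
        SkipSSL: true, // disables TLS verification, as with the -ai-skip-ssl flag
    })
    if err != nil {
        log.Fatal(err)
    }

    // Request a fix suggestion for a single finding.
    suggestion, err := client.GenerateSolution(context.Background(),
        "Provide a brief explanation and a solution to fix this security issue in Go: G404 weak random number generator")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(suggestion)
}
```

The same configuration mirrors the `-ai-base-url` and `-ai-skip-ssl` flags wired up in `cmd/gosec/main.go` below.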
8 changes: 7 additions & 1 deletion cmd/gosec/main.go
@@ -159,6 +159,12 @@ var (
// key to access the AI provider services
flagAiAPIKey = flag.String("ai-api-key", "", "Key to access the AI API")

// base URL for AI API (optional, for OpenAI-compatible APIs)
flagAiBaseURL = flag.String("ai-base-url", "", "Base URL for AI API (e.g., for OpenAI-compatible services)")

// skip SSL verification for AI API
flagAiSkipSSL = flag.Bool("ai-skip-ssl", false, "Skip SSL certificate verification for AI API")

// exclude the folders from scan
flagDirsExclude arrayFlags

@@ -509,7 +515,7 @@ func main() {
aiEnabled := *flagAiAPIProvider != ""

if len(issues) > 0 && aiEnabled {
err := autofix.GenerateSolution(*flagAiAPIProvider, aiAPIKey, issues)
err := autofix.GenerateSolution(*flagAiAPIProvider, aiAPIKey, *flagAiBaseURL, *flagAiSkipSSL, issues)
if err != nil {
logger.Print(err)
}
1 change: 1 addition & 0 deletions go.mod
@@ -9,6 +9,7 @@ require (
github.com/mozilla/tls-observatory v0.0.0-20250923143331-eef96233227e
github.com/onsi/ginkgo/v2 v2.27.2
github.com/onsi/gomega v1.38.2
github.com/openai/openai-go/v3 v3.8.1
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2
github.com/stretchr/testify v1.11.1
golang.org/x/crypto v0.43.0
2 changes: 2 additions & 0 deletions go.sum
@@ -311,6 +311,8 @@ github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zw
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
github.com/openai/openai-go/v3 v3.8.1 h1:b+YWsmwqXnbpSHWQEntZAkKciBZ5CJXwL68j+l59UDg=
github.com/openai/openai-go/v3 v3.8.1/go.mod h1:UOpNxkqC9OdNXNUfpNByKOtB4jAL0EssQXq5p8gO0Xs=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=