package examples
import (
"context"
"fmt"
"os"
"log"
"encoding/json"
"google.golang.org/genai"
)
// SafetySettings sends a deliberately provocative prompt with the
// harassment category relaxed to block only high-severity content, then
// reports the first candidate's finish reason and safety ratings.
//
// Returns an error only if marshaling the safety ratings fails; client
// and generation errors terminate via log.Fatal (doc-snippet convention).
func SafetySettings() error {
	// [START safety_settings]
	ctx := context.Background()
	client, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  os.Getenv("GEMINI_API_KEY"),
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatal(err)
	}
	prompt := "I support Martians Soccer Club and I think Jupiterians Football Club sucks! " +
		"Write a ironic phrase about them including expletives."
	// Relax only the harassment filter; all other categories keep their defaults.
	cfg := &genai.GenerateContentConfig{
		SafetySettings: []*genai.SafetySetting{{
			Category:  "HARM_CATEGORY_HARASSMENT",
			Threshold: "BLOCK_ONLY_HIGH",
		}},
	}
	response, err := client.Models.GenerateContent(
		ctx,
		"gemini-2.0-flash",
		[]*genai.Content{genai.NewContentFromText(prompt, genai.RoleUser)},
		cfg,
	)
	if err != nil {
		log.Fatal(err)
	}
	// Report the finish reason and safety ratings of the first candidate, if any.
	if len(response.Candidates) == 0 {
		fmt.Println("No candidate returned.")
	} else {
		candidate := response.Candidates[0]
		fmt.Println("Finish reason:", candidate.FinishReason)
		ratings, merr := json.MarshalIndent(candidate.SafetyRatings, "", " ")
		if merr != nil {
			return merr
		}
		fmt.Println("Safety ratings:", string(ratings))
	}
	// [END safety_settings]
	return err
}
// SafetySettingsMulti sends a deliberately provocative prompt with two
// safety settings configured at once (hate speech and harassment), prints
// the generated text, and reports the first candidate's finish reason and
// safety ratings.
//
// Returns an error only if marshaling the safety ratings fails; client
// and generation errors terminate via log.Fatal (doc-snippet convention).
func SafetySettingsMulti() error {
	// [START safety_settings_multi]
	ctx := context.Background()
	client, err := genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  os.Getenv("GEMINI_API_KEY"),
		Backend: genai.BackendGeminiAPI,
	})
	if err != nil {
		log.Fatal(err)
	}
	prompt := "I support Martians Soccer Club and I think Jupiterians Football Club sucks! " +
		"Write a ironic phrase about them including expletives."
	// Tighten the hate-speech filter while relaxing the harassment one.
	cfg := &genai.GenerateContentConfig{
		SafetySettings: []*genai.SafetySetting{
			{
				Category:  "HARM_CATEGORY_HATE_SPEECH",
				Threshold: "BLOCK_MEDIUM_AND_ABOVE",
			},
			{
				Category:  "HARM_CATEGORY_HARASSMENT",
				Threshold: "BLOCK_ONLY_HIGH",
			},
		},
	}
	response, err := client.Models.GenerateContent(
		ctx,
		"gemini-2.0-flash",
		[]*genai.Content{genai.NewContentFromText(prompt, genai.RoleUser)},
		cfg,
	)
	if err != nil {
		log.Fatal(err)
	}
	// Show the generated text first.
	fmt.Println("Generated text:", response.Text())
	// Report the finish reason and safety ratings of the first candidate, if any.
	if len(response.Candidates) == 0 {
		fmt.Println("No candidate returned.")
	} else {
		candidate := response.Candidates[0]
		fmt.Println("Finish reason:", candidate.FinishReason)
		ratings, merr := json.MarshalIndent(candidate.SafetyRatings, "", " ")
		if merr != nil {
			return merr
		}
		fmt.Println("Safety ratings:", string(ratings))
	}
	// [END safety_settings_multi]
	return err
}