run-llama/LlamaIndexTS, commit 2a0a899d (unverified)
Authored 3 weeks ago by ANKIT VARSHNEY; committed via GitHub 3 weeks ago
Parent: 050cd534

chore: added safety setting as parameter for gemini (#1760)
Showing 3 changed files, with 17 additions and 6 deletions:

.changeset/heavy-ants-turn.md            +5 −0
packages/providers/google/src/base.ts    +9 −4
packages/providers/google/src/vertex.ts  +3 −2
.changeset/heavy-ants-turn.md (new file, mode 100644, +5 −0)

---
"@llamaindex/google": patch
---

Added safety setting parameter for gemini
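With this patch, safety settings can be supplied once on the LLM instance instead of always falling back to the hard-coded DEFAULT_SAFETY_SETTINGS. A minimal usage sketch, assuming the package exports Gemini and GEMINI_MODEL as in the existing provider code; the specific category and threshold values are illustrative only:

import { HarmBlockThreshold, HarmCategory } from "@google/generative-ai";
import { Gemini, GEMINI_MODEL } from "@llamaindex/google";

// The new GeminiConfig.safetySettings field replaces the library defaults
// for every request made through this instance.
const llm = new Gemini({
  model: GEMINI_MODEL.GEMINI_PRO,
  safetySettings: [
    {
      category: HarmCategory.HARM_CATEGORY_HARASSMENT,
      threshold: HarmBlockThreshold.BLOCK_ONLY_HIGH,
    },
  ],
});

const response = await llm.chat({
  messages: [{ role: "user", content: "Hello" }],
});

The same array is also surfaced on llm.metadata.safetySettings, per the metadata getter change in base.ts below.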
packages/providers/google/src/base.ts (+9 −4)
@@ -6,6 +6,7 @@ import {
   type ModelParams as GoogleModelParams,
   type RequestOptions as GoogleRequestOptions,
   type GenerateContentStreamResult as GoogleStreamGenerateContentResult,
+  type SafetySetting,
 } from "@google/generative-ai";
 import { wrapLLMEvent } from "@llamaindex/core/decorator";
@@ -88,6 +89,7 @@ const DEFAULT_GEMINI_PARAMS = {
 export type GeminiConfig = Partial<typeof DEFAULT_GEMINI_PARAMS> & {
   session?: IGeminiSession;
   requestOptions?: GoogleRequestOptions;
+  safetySettings?: SafetySetting[];
 };
 
 /**
@@ -112,7 +114,7 @@ export class GeminiSession implements IGeminiSession {
   ): GoogleGenerativeModel {
     return this.gemini.getGenerativeModel(
       {
-        safetySettings: DEFAULT_SAFETY_SETTINGS,
+        safetySettings: metadata.safetySettings ?? DEFAULT_SAFETY_SETTINGS,
         ...metadata,
       },
       requestOpts,
@@ -218,6 +220,7 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
   maxTokens?: number | undefined;
   #requestOptions?: GoogleRequestOptions | undefined;
   session: IGeminiSession;
+  safetySettings: SafetySetting[];
 
   constructor(init?: GeminiConfig) {
     super();
@@ -227,13 +230,14 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
     this.maxTokens = init?.maxTokens ?? undefined;
     this.session = init?.session ?? GeminiSessionStore.get();
     this.#requestOptions = init?.requestOptions ?? undefined;
+    this.safetySettings = init?.safetySettings ?? DEFAULT_SAFETY_SETTINGS;
   }
 
   get supportToolCall(): boolean {
     return SUPPORT_TOOL_CALL_MODELS.includes(this.model);
   }
 
-  get metadata(): LLMMetadata {
+  get metadata(): LLMMetadata & { safetySettings: SafetySetting[] } {
     return {
       model: this.model,
       temperature: this.temperature,
@@ -242,6 +246,7 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
       contextWindow: GEMINI_MODEL_INFO_MAP[this.model].contextWindow,
       tokenizer: undefined,
       structuredOutput: false,
+      safetySettings: this.safetySettings,
     };
   }
@@ -251,7 +256,7 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
     const context = getChatContext(params);
     const common = {
       history: context.history,
-      safetySettings: DEFAULT_SAFETY_SETTINGS,
+      safetySettings: this.safetySettings,
     };
 
     return params.tools?.length
@@ -265,7 +270,7 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
             ),
           },
         ],
-        safetySettings: DEFAULT_SAFETY_SETTINGS,
+        safetySettings: this.safetySettings,
       }
       : common;
   }
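The core of the base.ts change is the precedence rule in GeminiSession.getGenerativeModel: safety settings carried on the model params win, and DEFAULT_SAFETY_SETTINGS applies only when none were given. A standalone sketch of that fallback, using hypothetical stand-ins for the provider's internals:

import {
  HarmBlockThreshold,
  HarmCategory,
  type SafetySetting,
} from "@google/generative-ai";

// Hypothetical stand-in for the provider's DEFAULT_SAFETY_SETTINGS constant.
const DEFAULT_SAFETY_SETTINGS: SafetySetting[] = [
  {
    category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,
    threshold: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
  },
];

// Hypothetical slice of the model params handed to getGenerativeModel.
type ModelMetadata = { safetySettings?: SafetySetting[] };

// Caller-supplied settings win; otherwise the defaults apply.
function resolveSafetySettings(metadata: ModelMetadata): SafetySetting[] {
  return metadata.safetySettings ?? DEFAULT_SAFETY_SETTINGS;
}

GeminiVertexSession (below) applies the same rule before calling into the Vertex SDK.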
packages/providers/google/src/vertex.ts (+3 −2)
@@ -59,14 +59,15 @@ export class GeminiVertexSession implements IGeminiSession {
   getGenerativeModel(
     metadata: VertexModelParams,
   ): VertexGenerativeModelPreview | VertexGenerativeModel {
+    const safetySettings = metadata.safetySettings ?? DEFAULT_SAFETY_SETTINGS;
     if (this.preview) {
       return this.vertex.preview.getGenerativeModel({
-        safetySettings: DEFAULT_SAFETY_SETTINGS,
+        safetySettings,
         ...metadata,
       });
     }
     return this.vertex.getGenerativeModel({
-      safetySettings: DEFAULT_SAFETY_SETTINGS,
+      safetySettings,
       ...metadata,
     });
   }