Unverified Commit 789e7727 authored by James Briggs

fix: improve exp backoff for openai encoder

parent f0f8d4c2
@@ -52,7 +52,7 @@ class OpenAIEncoder(BaseEncoder):
         error_message = ""
         # Exponential backoff
-        for j in range(3):
+        for j in range(1, 7):
             try:
                 embeds = self.client.embeddings.create(
                     input=docs,
...
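The change widens the retry window: assuming the elided except branch sleeps for 2**j seconds between attempts (the sleep line is not shown in this hunk), range(3) gives waits of 1, 2 and 4 seconds over three attempts, while range(1, 7) gives 2, 4, 8, 16, 32 and 64 seconds over six. A minimal standalone sketch of that pattern; the embed_with_backoff helper and the text-embedding-3-small default are illustrative, not part of the commit:

import time

from openai import OpenAI, OpenAIError

client = OpenAI()

def embed_with_backoff(docs: list[str], model: str = "text-embedding-3-small"):
    """Retry the embeddings call with exponential backoff.

    Mirrors the widened range(1, 7) loop: up to six attempts with
    waits of 2, 4, 8, 16, 32 and 64 seconds between them.
    """
    error_message = ""
    for j in range(1, 7):
        try:
            return client.embeddings.create(input=docs, model=model)
        except OpenAIError as e:
            # Assumed backoff schedule: sleep 2**j seconds before retrying.
            time.sleep(2**j)
            error_message = str(e)
    raise ValueError(f"OpenAI API call failed after retries: {error_message}")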
@@ -76,7 +76,7 @@ class RollingWindowSplitter(BaseSplitter):
         if len(docs) == 1:
             token_count = tiktoken_length(docs[0])
             if token_count > self.max_split_tokens:
-                logger.warning(
+                logger.info(
                     f"Single document exceeds the maximum token limit "
                     f"of {self.max_split_tokens}. "
                     "Splitting to sentences before semantically splitting."
...
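For context, tiktoken_length is the helper that counts tokens before the splitter decides whether to pre-split a long document into sentences. A minimal sketch of such a counter, assuming tiktoken's cl100k_base encoding (the encoding actually used by the library is not shown in this diff):

import tiktoken

def tiktoken_length(text: str) -> int:
    # Count tokens with tiktoken; cl100k_base is an assumed encoding choice.
    encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(text, disallowed_special=()))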