chore: fix unnecessary string concatenation in single line (#8311)

This commit is contained in:
Bowen Liang
2024-09-13 14:24:49 +08:00
committed by GitHub
parent 08c486452f
commit 6613b8f2e0
30 changed files with 46 additions and 49 deletions

View File

@@ -245,7 +245,7 @@ class RelytVector(BaseVector):
try:
from sqlalchemy.engine import Row
except ImportError:
raise ImportError("Could not import Row from sqlalchemy.engine. " "Please 'pip install sqlalchemy>=1.4'.")
raise ImportError("Could not import Row from sqlalchemy.engine. Please 'pip install sqlalchemy>=1.4'.")
filter_condition = ""
if filter is not None:

View File

@@ -88,7 +88,7 @@ class DatasetDocumentStore:
# NOTE: doc could already exist in the store, but we overwrite it
if not allow_update and segment_document:
raise ValueError(
f"doc_id {doc.metadata['doc_id']} already exists. " "Set allow_update to True to overwrite."
f"doc_id {doc.metadata['doc_id']} already exists. Set allow_update to True to overwrite."
)
# calc embedding use tokens

View File

@@ -50,7 +50,7 @@ class NotionExtractor(BaseExtractor):
integration_token = dify_config.NOTION_INTEGRATION_TOKEN
if integration_token is None:
raise ValueError(
"Must specify `integration_token` or set environment " "variable `NOTION_INTEGRATION_TOKEN`."
"Must specify `integration_token` or set environment variable `NOTION_INTEGRATION_TOKEN`."
)
self._notion_access_token = integration_token

View File

@@ -60,7 +60,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
"""
if chunk_overlap > chunk_size:
raise ValueError(
f"Got a larger chunk overlap ({chunk_overlap}) than chunk size " f"({chunk_size}), should be smaller."
f"Got a larger chunk overlap ({chunk_overlap}) than chunk size ({chunk_size}), should be smaller."
)
self._chunk_size = chunk_size
self._chunk_overlap = chunk_overlap
@@ -117,7 +117,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
if total + _len + (separator_len if len(current_doc) > 0 else 0) > self._chunk_size:
if total > self._chunk_size:
logger.warning(
f"Created a chunk of size {total}, " f"which is longer than the specified {self._chunk_size}"
f"Created a chunk of size {total}, which is longer than the specified {self._chunk_size}"
)
if len(current_doc) > 0:
doc = self._join_docs(current_doc, separator)
@@ -153,7 +153,7 @@ class TextSplitter(BaseDocumentTransformer, ABC):
except ImportError:
raise ValueError(
"Could not import transformers python package. " "Please install it with `pip install transformers`."
"Could not import transformers python package. Please install it with `pip install transformers`."
)
return cls(length_function=_huggingface_tokenizer_length, **kwargs)