feat: billing enhancement 20231204 (#1691)

Co-authored-by: jyong <jyong@dify.ai>
Author: Garfield Dai
Date: 2023-12-05 16:53:55 +08:00 (committed by GitHub)
Parent: cb3a55dae6
Commit: 7b8a10f3ea
12 changed files with 86 additions and 81 deletions


@@ -227,36 +227,6 @@ class DatasetService:
        return AppDatasetJoin.query.filter(AppDatasetJoin.dataset_id == dataset_id) \
            .order_by(db.desc(AppDatasetJoin.created_at)).all()

    @staticmethod
    def get_tenant_datasets_usage(tenant_id):
        # get the high_quality datasets
        dataset_ids = db.session.query(Dataset.id).filter(Dataset.indexing_technique == 'high_quality',
                                                          Dataset.tenant_id == tenant_id).all()
        if not dataset_ids:
            return 0
        dataset_ids = [result[0] for result in dataset_ids]
        document_ids = db.session.query(Document.id).filter(Document.dataset_id.in_(dataset_ids),
                                                            Document.tenant_id == tenant_id,
                                                            Document.completed_at.isnot(None),
                                                            Document.enabled == True,
                                                            Document.archived == False
                                                            ).all()
        if not document_ids:
            return 0
        document_ids = [result[0] for result in document_ids]
        document_segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id.in_(document_ids),
                                                                     DocumentSegment.tenant_id == tenant_id,
                                                                     DocumentSegment.completed_at.isnot(None),
                                                                     DocumentSegment.enabled == True,
                                                                     ).all()
        if not document_segments:
            return 0
        total_words_size = sum(document_segment.word_count * 3 for document_segment in document_segments)
        total_vector_size = 1536 * 4 * len(document_segments)
        return total_words_size + total_vector_size


class DocumentService:
    DEFAULT_RULES = {
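
For reference, the storage estimate produced by the removed get_tenant_datasets_usage helper can be expressed as a small standalone function. This is a minimal sketch, not Dify code: Segment is a stand-in for the DocumentSegment model, and the constants mirror the diff (roughly 3 bytes per word of text plus one 1536-dimensional float32 embedding, 1536 * 4 bytes, per segment).

    # Minimal sketch of the removed usage formula (Segment and
    # estimate_dataset_usage are illustrative names, not Dify APIs).
    from dataclasses import dataclass
    from typing import Iterable


    @dataclass
    class Segment:
        word_count: int


    def estimate_dataset_usage(segments: Iterable[Segment]) -> int:
        """Estimate high-quality dataset usage in bytes.

        Mirrors the removed logic: ~3 bytes per word of text, plus one
        1536-dimensional float32 embedding (1536 * 4 bytes) per segment.
        """
        segments = list(segments)
        total_words_size = sum(segment.word_count * 3 for segment in segments)
        total_vector_size = 1536 * 4 * len(segments)
        return total_words_size + total_vector_size


    # Example: two segments of 100 and 250 words
    # (100 + 250) * 3 + 1536 * 4 * 2 = 1050 + 12288 = 13338 bytes
    print(estimate_dataset_usage([Segment(100), Segment(250)]))
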
@@ -480,11 +450,6 @@ class DocumentService:
            notion_info_list = document_data["data_source"]['info_list']['notion_info_list']
            for notion_info in notion_info_list:
                count = count + len(notion_info['pages'])

        documents_count = DocumentService.get_tenant_documents_count()
        total_count = documents_count + count
        tenant_document_count = int(current_app.config['TENANT_DOCUMENT_COUNT'])
        if total_count > tenant_document_count:
            raise ValueError(f"over document limit {tenant_document_count}.")

        # if dataset is empty, update dataset data_source_type
        if not dataset.data_source_type:
            dataset.data_source_type = document_data["data_source"]["type"]
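
The block removed in this hunk compared the tenant's existing document count plus the incoming batch against the TENANT_DOCUMENT_COUNT config value and raised on overflow. A standalone sketch of that check is below; the function and argument names are illustrative, not Dify APIs.

    # Sketch of the removed per-tenant document cap (names are assumptions).
    def check_document_limit(existing_count: int, new_count: int, limit: int) -> None:
        """Raise if adding new_count documents would exceed the tenant limit."""
        total_count = existing_count + new_count
        if total_count > limit:
            raise ValueError(f"over document limit {limit}.")


    # 48 existing documents plus 5 new ones against a limit of 50 -> ValueError
    try:
        check_document_limit(48, 5, 50)
    except ValueError as exc:
        print(exc)  # over document limit 50.
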
@@ -770,13 +735,7 @@ class DocumentService:
            notion_info_list = document_data["data_source"]['info_list']['notion_info_list']
            for notion_info in notion_info_list:
                count = count + len(notion_info['pages'])

        # check document limit
        if current_app.config['EDITION'] == 'CLOUD':
            documents_count = DocumentService.get_tenant_documents_count()
            total_count = documents_count + count
            tenant_document_count = int(current_app.config['TENANT_DOCUMENT_COUNT'])
            if total_count > tenant_document_count:
                raise ValueError(f"All your documents have overed limit {tenant_document_count}.")

        embedding_model = None
        dataset_collection_binding_id = None
        retrieval_model = None
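
The variant removed in this last hunk only enforced the cap when the EDITION config was 'CLOUD'. The sketch below restates that gate with a plain dict in place of Flask's app config; the config keys come from the diff, while the function name and the error wording are assumptions.

    # Sketch of the edition-gated cap removed here (not the Dify implementation).
    from typing import Mapping


    def enforce_cloud_document_limit(config: Mapping[str, str],
                                     existing_count: int,
                                     new_count: int) -> None:
        """Apply the per-tenant document cap only for the cloud edition."""
        if config.get('EDITION') != 'CLOUD':
            return  # self-hosted editions were not capped by this check
        limit = int(config['TENANT_DOCUMENT_COUNT'])
        if existing_count + new_count > limit:
            raise ValueError(f"would exceed the document limit of {limit}.")


    cfg = {'EDITION': 'CLOUD', 'TENANT_DOCUMENT_COUNT': '50'}
    try:
        enforce_cloud_document_limit(cfg, existing_count=49, new_count=3)
    except ValueError as exc:
        print(exc)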