📦 langgenius / dify

📄 enable_segment_to_index_task.py · 120 lines
import logging
import time

import click
from celery import shared_task

from core.db.session_factory import session_factory
from core.rag.index_processor.constant.doc_type import DocType
from core.rag.index_processor.constant.index_type import IndexStructureType
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.rag.models.document import AttachmentDocument, ChildDocument, Document
from extensions.ext_redis import redis_client
from libs.datetime_utils import naive_utc_now
from models.dataset import DocumentSegment

logger = logging.getLogger(__name__)


@shared_task(queue="dataset")
def enable_segment_to_index_task(segment_id: str):
    """
    Async enable segment to index
    :param segment_id:

    Usage: enable_segment_to_index_task.delay(segment_id)
    """
    logger.info(click.style(f"Start enable segment to index: {segment_id}", fg="green"))
    start_at = time.perf_counter()

    with session_factory.create_session() as session:
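        # Look up the segment; exit early if it no longer exists.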
        segment = session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first()
        if not segment:
            logger.info(click.style(f"Segment not found: {segment_id}", fg="red"))
            return

        if segment.status != "completed":
            logger.info(click.style(f"Segment is not completed, enable is not allowed: {segment_id}", fg="red"))
            return

        indexing_cache_key = f"segment_{segment.id}_indexing"
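        # Cache key marking this segment's in-flight indexing; it is always cleared in the finally block below.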

        try:
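            # Rebuild the segment as a Document whose metadata carries the index node id/hash
            # plus the owning document and dataset ids.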
            document = Document(
                page_content=segment.content,
                metadata={
                    "doc_id": segment.index_node_id,
                    "doc_hash": segment.index_node_hash,
                    "document_id": segment.document_id,
                    "dataset_id": segment.dataset_id,
                },
            )

            dataset = segment.dataset

            if not dataset:
                logger.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan"))
                return

            dataset_document = segment.document

            if not dataset_document:
                logger.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan"))
                return

            if (
                not dataset_document.enabled
                or dataset_document.archived
                or dataset_document.indexing_status != "completed"
            ):
                logger.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan"))
                return

            index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()
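            # For parent-child indexes, attach the segment's child chunks as ChildDocuments on the parent Document.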
            if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX:
                child_chunks = segment.get_child_chunks()
                if child_chunks:
                    child_documents = []
                    for child_chunk in child_chunks:
                        child_document = ChildDocument(
                            page_content=child_chunk.content,
                            metadata={
                                "doc_id": child_chunk.index_node_id,
                                "doc_hash": child_chunk.index_node_hash,
                                "document_id": segment.document_id,
                                "dataset_id": segment.dataset_id,
                            },
                        )
                        child_documents.append(child_document)
                    document.children = child_documents
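            # For multimodal datasets, also collect the segment's image attachments for indexing.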
            multimodal_documents = []
            if dataset.is_multimodal:
                for attachment in segment.attachments:
                    multimodal_documents.append(
                        AttachmentDocument(
                            page_content=attachment["name"],
                            metadata={
                                "doc_id": attachment["id"],
                                "doc_hash": "",
                                "document_id": segment.document_id,
                                "dataset_id": segment.dataset_id,
                                "doc_type": DocType.IMAGE,
                            },
                        )
                    )

            # Save the document (plus any child and attachment documents) into the vector index.
            index_processor.load(dataset, [document], multimodal_documents=multimodal_documents)

            end_at = time.perf_counter()
            logger.info(click.style(f"Segment enabled to index: {segment.id} latency: {end_at - start_at}", fg="green"))
        except Exception as e:
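            # On failure, disable the segment and persist the error details.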
            logger.exception("enable segment to index failed")
            segment.enabled = False
            segment.disabled_at = naive_utc_now()
            segment.status = "error"
            segment.error = str(e)
            session.commit()
        finally:
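            # Release the indexing cache key whether or not indexing succeeded.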
            redis_client.delete(indexing_cache_key)