Mirror of https://gitee.com/dify_ai/dify.git (synced 2025-12-06 19:42:42 +08:00)

Compare commits: fix/plugin...fix/notion (18 commits)
Commits: 23670072d9, 8f64327d57, 1d41b642ca, cff039d123, 0cfdb8c043,
db20f9bb71, 2020a31785, 2c04a16eaa, 6325129761, 9a18a98b58, 7b9e01aa07,
2bb19f85c6, 17fe62cf91, e99861d4fe, a205ee16b9, 9835730278, 8331b63baa,
2df4699312
.gitignore (vendored, 1 addition)
@@ -179,6 +179,7 @@ docker/volumes/pgvecto_rs/data/*
 docker/volumes/couchbase/*
 docker/volumes/oceanbase/*
 docker/volumes/plugin_daemon/*
+docker/volumes/matrixone/*
 !docker/volumes/oceanbase/init.d
 
 docker/nginx/conf.d/default.conf
@@ -137,7 +137,7 @@ WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 
 # Vector database configuration
-# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore
+# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone
 VECTOR_STORE=weaviate
 
 # Weaviate configuration
@@ -294,6 +294,13 @@ VIKINGDB_SCHEMA=http
 VIKINGDB_CONNECTION_TIMEOUT=30
 VIKINGDB_SOCKET_TIMEOUT=30
 
+# Matrixone configration
+MATRIXONE_HOST=127.0.0.1
+MATRIXONE_PORT=6001
+MATRIXONE_USER=dump
+MATRIXONE_PASSWORD=111
+MATRIXONE_DATABASE=dify
+
 # Lindorm configuration
 LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
 LINDORM_USERNAME=admin
@@ -332,9 +339,11 @@ PROMPT_GENERATION_MAX_TOKENS=512
 CODE_GENERATION_MAX_TOKENS=1024
 PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
 
-# Mail configuration, support: resend, smtp
+# Mail configuration, support: resend, smtp, sendgrid
 MAIL_TYPE=
+# If using SendGrid, use the 'from' field for authentication if necessary.
 MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
 # resend configuration
 RESEND_API_KEY=
 RESEND_API_URL=https://api.resend.com
 # smtp configuration
@@ -344,7 +353,8 @@ SMTP_USERNAME=123
 SMTP_PASSWORD=abc
 SMTP_USE_TLS=true
 SMTP_OPPORTUNISTIC_TLS=false
 
+# Sendgid configuration
+SENDGRID_API_KEY=
 # Sentry configuration
 SENTRY_DSN=
@@ -281,6 +281,7 @@ def migrate_knowledge_vector_database():
         VectorType.ELASTICSEARCH,
         VectorType.OPENGAUSS,
         VectorType.TABLESTORE,
+        VectorType.MATRIXONE,
     }
     lower_collection_vector_types = {
         VectorType.ANALYTICDB,
@@ -609,7 +609,7 @@ class MailConfig(BaseSettings):
     """
 
     MAIL_TYPE: Optional[str] = Field(
-        description="Email service provider type ('smtp' or 'resend'), default to None.",
+        description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
         default=None,
     )
@@ -663,6 +663,11 @@ class MailConfig(BaseSettings):
         default=50,
     )
 
+    SENDGRID_API_KEY: Optional[str] = Field(
+        description="API key for SendGrid service",
+        default=None,
+    )
+
 
 class RagEtlConfig(BaseSettings):
     """
@@ -24,6 +24,7 @@ from .vdb.couchbase_config import CouchbaseConfig
 from .vdb.elasticsearch_config import ElasticsearchConfig
 from .vdb.huawei_cloud_config import HuaweiCloudConfig
 from .vdb.lindorm_config import LindormConfig
+from .vdb.matrixone_config import MatrixoneConfig
 from .vdb.milvus_config import MilvusConfig
 from .vdb.myscale_config import MyScaleConfig
 from .vdb.oceanbase_config import OceanBaseVectorConfig
@@ -323,5 +324,6 @@ class MiddlewareConfig(
     OpenGaussConfig,
     TableStoreConfig,
     DatasetQueueMonitorConfig,
+    MatrixoneConfig,
 ):
     pass
api/configs/middleware/vdb/matrixone_config.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from pydantic import BaseModel, Field


class MatrixoneConfig(BaseModel):
    """Matrixone vector database configuration."""

    MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
    MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server")
    MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone")
    MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone")
    MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to")
    MATRIXONE_METRIC: str = Field(
        default="l2", description="Distance metric type for vector similarity search (cosine or l2)"
    )
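Once MatrixoneConfig is mixed into MiddlewareConfig (previous hunk), these fields resolve from the environment and surface on Dify's global settings object. A minimal sketch of that lookup, mirroring how the vector factory later in this diff consumes it:

```python
# Sketch only: dify_config is Dify's global settings object; the MATRIXONE_*
# attributes come from the MatrixoneConfig mixin above and can be overridden
# via environment variables (see the .env.example hunks earlier).
from configs import dify_config

host = dify_config.MATRIXONE_HOST or "localhost"
port = dify_config.MATRIXONE_PORT or 6001
print(f"Matrixone at {host}:{port}, metric={dify_config.MATRIXONE_METRIC or 'l2'}")
```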
@@ -17,6 +17,8 @@ from libs.login import login_required
 from models import Account
 from models.model import App
 from services.app_dsl_service import AppDslService, ImportStatus
+from services.enterprise.enterprise_service import EnterpriseService
+from services.feature_service import FeatureService
 
 
 class AppImportApi(Resource):
@@ -60,7 +62,9 @@ class AppImportApi(Resource):
                 app_id=args.get("app_id"),
             )
             session.commit()
-
+        if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
+            # update web app setting as private
+            EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
         # Return appropriate status code based on result
         status = result.status
         if status == ImportStatus.FAILED.value:
@@ -34,6 +34,20 @@ class WorkflowAppLogApi(Resource):
         parser.add_argument(
             "created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
         )
+        parser.add_argument(
+            "created_by_end_user_session_id",
+            type=str,
+            location="args",
+            required=False,
+            default=None,
+        )
+        parser.add_argument(
+            "created_by_account",
+            type=str,
+            location="args",
+            required=False,
+            default=None,
+        )
         parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
         parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
         args = parser.parse_args()
@@ -57,6 +71,8 @@ class WorkflowAppLogApi(Resource):
             created_at_after=args.created_at__after,
             page=args.page,
             limit=args.limit,
+            created_by_end_user_session_id=args.created_by_end_user_session_id,
+            created_by_account=args.created_by_account,
         )
 
         return workflow_app_log_pagination
@@ -686,6 +686,7 @@ class DatasetRetrievalSettingApi(Resource):
                 | VectorType.TABLESTORE
                 | VectorType.HUAWEI_CLOUD
                 | VectorType.TENCENT
+                | VectorType.MATRIXONE
             ):
                 return {
                     "retrieval_method": [
@@ -733,6 +734,7 @@ class DatasetRetrievalSettingMockApi(Resource):
                 | VectorType.TABLESTORE
                 | VectorType.TENCENT
                 | VectorType.HUAWEI_CLOUD
+                | VectorType.MATRIXONE
             ):
                 return {
                     "retrieval_method": [
@@ -15,7 +15,7 @@ class LoadBalancingCredentialsValidateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str):
-        if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
+        if not TenantAccountRole.is_privileged_role(current_user.current_role):
             raise Forbidden()
 
         tenant_id = current_user.current_tenant_id
@@ -64,7 +64,7 @@ class LoadBalancingConfigCredentialsValidateApi(Resource):
     @login_required
     @account_initialization_required
     def post(self, provider: str, config_id: str):
-        if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role):
+        if not TenantAccountRole.is_privileged_role(current_user.current_role):
            raise Forbidden()
 
         tenant_id = current_user.current_tenant_id
@@ -135,6 +135,20 @@ class WorkflowAppLogApi(Resource):
         parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
         parser.add_argument("created_at__before", type=str, location="args")
         parser.add_argument("created_at__after", type=str, location="args")
+        parser.add_argument(
+            "created_by_end_user_session_id",
+            type=str,
+            location="args",
+            required=False,
+            default=None,
+        )
+        parser.add_argument(
+            "created_by_account",
+            type=str,
+            location="args",
+            required=False,
+            default=None,
+        )
         parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
         parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
         args = parser.parse_args()
@@ -158,6 +172,8 @@ class WorkflowAppLogApi(Resource):
             created_at_after=args.created_at__after,
             page=args.page,
             limit=args.limit,
+            created_by_end_user_session_id=args.created_by_end_user_session_id,
+            created_by_account=args.created_by_account,
         )
 
         return workflow_app_log_pagination
@@ -367,6 +367,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         :param user: account or end user
         :param invoke_from: invoke from source
         :param application_generate_entity: application generate entity
+        :param workflow_execution_repository: repository for workflow execution
         :param workflow_node_execution_repository: repository for workflow node execution
         :param conversation: conversation
         :param stream: is stream
@@ -195,6 +195,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         :param user: account or end user
         :param application_generate_entity: application generate entity
         :param invoke_from: invoke from source
+        :param workflow_execution_repository: repository for workflow execution
         :param workflow_node_execution_repository: repository for workflow node execution
         :param streaming: is stream
         :param workflow_thread_pool_id: workflow thread pool id
api/core/rag/datasource/vdb/matrixone/__init__.py (new file, empty)

api/core/rag/datasource/vdb/matrixone/matrixone_vector.py (new file, 233 lines)
@@ -0,0 +1,233 @@
import json
import logging
import uuid
from functools import wraps
from typing import Any, Optional

from mo_vector.client import MoVectorClient  # type: ignore
from pydantic import BaseModel, model_validator

from configs import dify_config
from core.rag.datasource.vdb.vector_base import BaseVector
from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.embedding.embedding_base import Embeddings
from core.rag.models.document import Document
from extensions.ext_redis import redis_client
from models.dataset import Dataset

logger = logging.getLogger(__name__)


class MatrixoneConfig(BaseModel):
    host: str = "localhost"
    port: int = 6001
    user: str = "dump"
    password: str = "111"
    database: str = "dify"
    metric: str = "l2"

    @model_validator(mode="before")
    @classmethod
    def validate_config(cls, values: dict) -> dict:
        if not values["host"]:
            raise ValueError("config host is required")
        if not values["port"]:
            raise ValueError("config port is required")
        if not values["user"]:
            raise ValueError("config user is required")
        if not values["password"]:
            raise ValueError("config password is required")
        if not values["database"]:
            raise ValueError("config database is required")
        return values


def ensure_client(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.client is None:
            self.client = self._get_client(None, False)
        return func(self, *args, **kwargs)

    return wrapper


class MatrixoneVector(BaseVector):
    """
    Matrixone vector storage implementation.
    """

    def __init__(self, collection_name: str, config: MatrixoneConfig):
        super().__init__(collection_name)
        self.config = config
        self.collection_name = collection_name.lower()
        self.client = None

    @property
    def collection_name(self):
        return self._collection_name

    @collection_name.setter
    def collection_name(self, value):
        self._collection_name = value

    def get_type(self) -> str:
        return VectorType.MATRIXONE

    def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        return self.add_texts(texts, embeddings)

    def _get_client(self, dimension: Optional[int] = None, create_table: bool = False) -> MoVectorClient:
        """
        Create a new client for the collection.

        The collection will be created if it doesn't exist.
        """
        lock_name = f"vector_indexing_lock_{self._collection_name}"
        with redis_client.lock(lock_name, timeout=20):
            client = MoVectorClient(
                connection_string=f"mysql+pymysql://{self.config.user}:{self.config.password}@{self.config.host}:{self.config.port}/{self.config.database}",
                table_name=self.collection_name,
                vector_dimension=dimension,
                create_table=create_table,
            )
            collection_exist_cache_key = f"vector_indexing_{self._collection_name}"
            if redis_client.get(collection_exist_cache_key):
                return client
            try:
                client.create_full_text_index()
            except Exception as e:
                logger.exception("Failed to create full text index")
            redis_client.set(collection_exist_cache_key, 1, ex=3600)
            return client

    def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs):
        if self.client is None:
            self.client = self._get_client(len(embeddings[0]), True)
        assert self.client is not None
        ids = []
        for _, doc in enumerate(documents):
            if doc.metadata is not None:
                doc_id = doc.metadata.get("doc_id", str(uuid.uuid4()))
                ids.append(doc_id)
        self.client.insert(
            texts=[doc.page_content for doc in documents],
            embeddings=embeddings,
            metadatas=[doc.metadata for doc in documents],
            ids=ids,
        )
        return ids

    @ensure_client
    def text_exists(self, id: str) -> bool:
        assert self.client is not None
        result = self.client.get(ids=[id])
        return len(result) > 0

    @ensure_client
    def delete_by_ids(self, ids: list[str]) -> None:
        assert self.client is not None
        if not ids:
            return
        self.client.delete(ids=ids)

    @ensure_client
    def get_ids_by_metadata_field(self, key: str, value: str):
        assert self.client is not None
        results = self.client.query_by_metadata(filter={key: value})
        return [result.id for result in results]

    @ensure_client
    def delete_by_metadata_field(self, key: str, value: str) -> None:
        assert self.client is not None
        self.client.delete(filter={key: value})

    @ensure_client
    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}

        results = self.client.query(
            query_vector=query_vector,
            k=top_k,
            filter=filter,
        )

        docs = []
        # TODO: add the score threshold to the query
        for result in results:
            metadata = result.metadata
            docs.append(
                Document(
                    page_content=result.document,
                    metadata=metadata,
                )
            )
        return docs

    @ensure_client
    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
        assert self.client is not None
        top_k = kwargs.get("top_k", 5)
        document_ids_filter = kwargs.get("document_ids_filter")
        filter = None
        if document_ids_filter:
            filter = {"document_id": {"$in": document_ids_filter}}
        score_threshold = float(kwargs.get("score_threshold", 0.0))

        results = self.client.full_text_query(
            keywords=[query],
            k=top_k,
            filter=filter,
        )

        docs = []
        for result in results:
            metadata = result.metadata
            if isinstance(metadata, str):
                import json

                metadata = json.loads(metadata)
            score = 1 - result.distance
            if score >= score_threshold:
                metadata["score"] = score
                docs.append(
                    Document(
                        page_content=result.document,
                        metadata=metadata,
                    )
                )
        return docs

    @ensure_client
    def delete(self) -> None:
        assert self.client is not None
        self.client.delete()


class MatrixoneVectorFactory(AbstractVectorFactory):
    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MatrixoneVector:
        if dataset.index_struct_dict:
            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
            collection_name = class_prefix
        else:
            dataset_id = dataset.id
            collection_name = Dataset.gen_collection_name_by_id(dataset_id)
            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.MATRIXONE, collection_name))

        config = MatrixoneConfig(
            host=dify_config.MATRIXONE_HOST or "localhost",
            port=dify_config.MATRIXONE_PORT or 6001,
            user=dify_config.MATRIXONE_USER or "dump",
            password=dify_config.MATRIXONE_PASSWORD or "111",
            database=dify_config.MATRIXONE_DATABASE or "dify",
            metric=dify_config.MATRIXONE_METRIC or "l2",
        )
        return MatrixoneVector(collection_name=collection_name, config=config)
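Putting the pieces together, a rough usage sketch of the class above (embedding values are made up; note that `_get_client` takes a Redis lock, so the Redis extension must be initialized — in Dify the factory wired up below is what normally constructs this store):

```python
# Rough sketch of the MatrixoneVector API defined above; vectors, ids and
# documents are illustrative only.
from core.rag.models.document import Document

config = MatrixoneConfig(host="localhost", port=6001, user="dump",
                         password="111", database="dify", metric="l2")
store = MatrixoneVector(collection_name="vector_index_demo_node", config=config)

docs = [Document(page_content="hello matrixone",
                 metadata={"doc_id": "d1", "document_id": "doc-1"})]
store.create(texts=docs, embeddings=[[0.1, 0.2, 0.3]])   # creates the table on first use
hits = store.search_by_vector([0.1, 0.2, 0.3], top_k=5)  # vector similarity
hits_ft = store.search_by_full_text("hello", top_k=5)    # full-text index
store.delete_by_ids(["d1"])
```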
@@ -164,6 +164,10 @@ class Vector:
                 from core.rag.datasource.vdb.huawei.huawei_cloud_vector import HuaweiCloudVectorFactory

                 return HuaweiCloudVectorFactory
+            case VectorType.MATRIXONE:
+                from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneVectorFactory
+
+                return MatrixoneVectorFactory
             case _:
                 raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -29,3 +29,4 @@ class VectorType(StrEnum):
     OPENGAUSS = "opengauss"
     TABLESTORE = "tablestore"
     HUAWEI_CLOUD = "huawei_cloud"
+    MATRIXONE = "matrixone"
@@ -45,7 +45,8 @@ class WeaviateVector(BaseVector):
         # by changing the connection timeout to pypi.org from 1 second to 0.001 seconds.
         # TODO: This can be removed once weaviate-client is updated to 3.26.7 or higher,
         # which does not contain the deprecation check.
-        weaviate.connect.connection.PYPI_TIMEOUT = 0.001
+        if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"):
+            weaviate.connect.connection.PYPI_TIMEOUT = 0.001

         try:
             client = weaviate.Client(
@@ -151,17 +151,12 @@ class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
             existing = session.scalar(select(WorkflowRun).where(WorkflowRun.id == domain_model.id_))
             if not existing:
                 # For new records, get the next sequence number
-                # in case multiple executions are created concurrently, use for update
-                stmt = (
-                    select(func.coalesce(func.max(WorkflowRun.sequence_number), 0) + 1)
-                    .where(
-                        WorkflowRun.app_id == self._app_id,
-                        WorkflowRun.tenant_id == self._tenant_id,
-                    )
-                    .with_for_update()
-                )
-                next_seq = session.scalar(stmt)
-                db_model.sequence_number = int(next_seq) if next_seq is not None else 1
+                stmt = select(func.max(WorkflowRun.sequence_number)).where(
+                    WorkflowRun.app_id == self._app_id,
+                    WorkflowRun.tenant_id == self._tenant_id,
+                )
+                max_sequence = session.scalar(stmt)
+                db_model.sequence_number = (max_sequence or 0) + 1
             else:
                 # For updates, keep the existing sequence number
                 db_model.sequence_number = existing.sequence_number
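The rewrite above changes more than style: the old query computed `coalesce(max(sequence_number), 0) + 1` under a `FOR UPDATE` lock to serialize concurrent inserts, while the new version issues a plain `max()` select and adds 1 in Python, so two executions created at the same instant could in principle compute the same number. A standalone sketch of the new pattern:

```python
# Sketch of the simplified logic above (SQLAlchemy 2.0 style); WorkflowRun is
# the model referenced in the hunk. No FOR UPDATE row lock is taken any more.
from sqlalchemy import func, select

from models import WorkflowRun


def next_sequence_number(session, app_id: str, tenant_id: str) -> int:
    stmt = select(func.max(WorkflowRun.sequence_number)).where(
        WorkflowRun.app_id == app_id,
        WorkflowRun.tenant_id == tenant_id,
    )
    max_sequence = session.scalar(stmt)  # None when the app has no runs yet
    return (max_sequence or 0) + 1
```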
@@ -57,7 +57,6 @@ class StreamProcessor(ABC):
 
                     # The branch_identify parameter is added to ensure that
                     # only nodes in the correct logical branch are included.
-                    reachable_node_ids.append(edge.target_node_id)
                     ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id, run_result.edge_source_handle)
                     reachable_node_ids.extend(ids)
                 else:
@@ -74,6 +73,8 @@ class StreamProcessor(ABC):
             self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids)
 
     def _fetch_node_ids_in_reachable_branch(self, node_id: str, branch_identify: Optional[str] = None) -> list[str]:
+        if node_id not in self.rest_node_ids:
+            self.rest_node_ids.append(node_id)
         node_ids = []
         for edge in self.graph.edge_mapping.get(node_id, []):
             if edge.target_node_id == self.graph.root_node_id:
@@ -8,4 +8,5 @@ EMPTY_VALUE_MAPPING = {
     SegmentType.ARRAY_STRING: [],
     SegmentType.ARRAY_NUMBER: [],
     SegmentType.ARRAY_OBJECT: [],
+    SegmentType.ARRAY_FILE: [],
 }
@@ -1,5 +1,6 @@
 from typing import Any
 
+from core.file import File
 from core.variables import SegmentType
 
 from .enums import Operation
@@ -85,6 +86,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
             return isinstance(value, int | float)
         case SegmentType.ARRAY_OBJECT if operation == Operation.APPEND:
             return isinstance(value, dict)
+        case SegmentType.ARRAY_FILE if operation == Operation.APPEND:
+            return isinstance(value, File)

         # Array & Extend / Overwrite
         case SegmentType.ARRAY_ANY if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
@@ -95,6 +98,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va
             return isinstance(value, list) and all(isinstance(item, int | float) for item in value)
         case SegmentType.ARRAY_OBJECT if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
             return isinstance(value, list) and all(isinstance(item, dict) for item in value)
+        case SegmentType.ARRAY_FILE if operation in {Operation.EXTEND, Operation.OVER_WRITE}:
+            return isinstance(value, list) and all(isinstance(item, File) for item in value)

         case _:
             return False
@@ -54,6 +54,15 @@ class Mail:
                     use_tls=dify_config.SMTP_USE_TLS,
                     opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS,
                 )
+            case "sendgrid":
+                from libs.sendgrid import SendGridClient
+
+                if not dify_config.SENDGRID_API_KEY:
+                    raise ValueError("SENDGRID_API_KEY is required for SendGrid mail type")
+
+                self._client = SendGridClient(
+                    sendgrid_api_key=dify_config.SENDGRID_API_KEY, _from=dify_config.MAIL_DEFAULT_SEND_FROM or ""
+                )
             case _:
                 raise ValueError("Unsupported mail type {}".format(mail_type))
@@ -101,6 +101,8 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen
             result = ArrayNumberVariable.model_validate(mapping)
         case SegmentType.ARRAY_OBJECT if isinstance(value, list):
             result = ArrayObjectVariable.model_validate(mapping)
+        case SegmentType.ARRAY_FILE if isinstance(value, list):
+            result = ArrayFileVariable.model_validate(mapping)
         case _:
             raise VariableError(f"not supported value type {value_type}")
     if result.size > dify_config.MAX_VARIABLE_SIZE:
api/libs/sendgrid.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import logging

import sendgrid  # type: ignore
from python_http_client.exceptions import ForbiddenError, UnauthorizedError
from sendgrid.helpers.mail import Content, Email, Mail, To  # type: ignore


class SendGridClient:
    def __init__(self, sendgrid_api_key: str, _from: str):
        self.sendgrid_api_key = sendgrid_api_key
        self._from = _from

    def send(self, mail: dict):
        logging.debug("Sending email with SendGrid")

        try:
            _to = mail["to"]

            if not _to:
                raise ValueError("SendGridClient: Cannot send email: recipient address is missing.")

            sg = sendgrid.SendGridAPIClient(api_key=self.sendgrid_api_key)
            from_email = Email(self._from)
            to_email = To(_to)
            subject = mail["subject"]
            content = Content("text/html", mail["html"])
            mail = Mail(from_email, to_email, subject, content)
            mail_json = mail.get()  # type: ignore
            response = sg.client.mail.send.post(request_body=mail_json)
            logging.debug(response.status_code)
            logging.debug(response.body)
            logging.debug(response.headers)

        except TimeoutError as e:
            logging.exception("SendGridClient Timeout occurred while sending email")
            raise
        except (UnauthorizedError, ForbiddenError) as e:
            logging.exception("SendGridClient Authentication failed. Verify that your credentials and the 'from")
            raise
        except Exception as e:
            logging.exception(f"SendGridClient Unexpected error occurred while sending email to {_to}")
            raise
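For context, a minimal sketch of how this client is driven; the `mail` dict keys match what `send()` reads above, the key and addresses are placeholders, and in Dify the wiring happens in the `case "sendgrid":` branch of the Mail extension shown earlier:

```python
# Hypothetical values throughout; SendGridClient is the class defined above.
from libs.sendgrid import SendGridClient

client = SendGridClient(sendgrid_api_key="SG.xxxx", _from="no-reply@example.com")
client.send({
    "to": "user@example.com",
    "subject": "Dify notification",
    "html": "<p>Hello from Dify via SendGrid.</p>",
})
```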
@@ -18,4 +18,3 @@ ignore_missing_imports=True
 
 [mypy-flask_restful.inputs]
 ignore_missing_imports=True
-
@@ -81,6 +81,7 @@ dependencies = [
     "weave~=0.51.0",
     "yarl~=1.18.3",
     "webvtt-py~=0.5.1",
+    "sendgrid~=6.12.3",
 ]
 # Before adding new dependency, consider place it in
 # alphabet order (a-z) and suitable group.
@@ -202,4 +203,5 @@ vdb = [
     "volcengine-compat~=1.0.0",
     "weaviate-client~=3.24.0",
     "xinference-client~=1.2.2",
+    "mo-vector~=0.1.13",
 ]
@@ -976,12 +976,17 @@ class DocumentService:
             process_rule = knowledge_config.process_rule
             if process_rule:
                 if process_rule.mode in ("custom", "hierarchical"):
-                    dataset_process_rule = DatasetProcessRule(
-                        dataset_id=dataset.id,
-                        mode=process_rule.mode,
-                        rules=process_rule.rules.model_dump_json() if process_rule.rules else None,
-                        created_by=account.id,
-                    )
+                    if process_rule.rules:
+                        dataset_process_rule = DatasetProcessRule(
+                            dataset_id=dataset.id,
+                            mode=process_rule.mode,
+                            rules=process_rule.rules.model_dump_json() if process_rule.rules else None,
+                            created_by=account.id,
+                        )
+                    else:
+                        dataset_process_rule = dataset.latest_process_rule
+                        if not dataset_process_rule:
+                            raise ValueError("No process rule found.")
                 elif process_rule.mode == "automatic":
                     dataset_process_rule = DatasetProcessRule(
                         dataset_id=dataset.id,
@@ -1402,16 +1407,16 @@ class DocumentService:
                 knowledge_config.embedding_model,  # type: ignore
             )
             dataset_collection_binding_id = dataset_collection_binding.id
-            if knowledge_config.retrieval_model:
-                retrieval_model = knowledge_config.retrieval_model
-            else:
-                retrieval_model = RetrievalModel(
-                    search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
-                    reranking_enable=False,
-                    reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
-                    top_k=2,
-                    score_threshold_enabled=False,
-                )
+        if knowledge_config.retrieval_model:
+            retrieval_model = knowledge_config.retrieval_model
+        else:
+            retrieval_model = RetrievalModel(
+                search_method=RetrievalMethod.SEMANTIC_SEARCH.value,
+                reranking_enable=False,
+                reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""),
+                top_k=2,
+                score_threshold_enabled=False,
+            )
         # save dataset
         dataset = Dataset(
             tenant_id=tenant_id,
@@ -101,7 +101,7 @@ class WeightModel(BaseModel):
 
 
 class RetrievalModel(BaseModel):
-    search_method: Literal["hybrid_search", "semantic_search", "full_text_search"]
+    search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"]
     reranking_enable: bool
     reranking_model: Optional[RerankingModel] = None
     reranking_mode: Optional[str] = None
@@ -3,7 +3,7 @@ import logging
 
 import click
 
-from core.entities import DEFAULT_PLUGIN_ID
+from core.plugin.entities.plugin import GenericProviderID, ModelProviderID, ToolProviderID
 from models.engine import db
 
 logger = logging.getLogger(__name__)
@@ -12,17 +12,17 @@ logger = logging.getLogger(__name__)
 class PluginDataMigration:
     @classmethod
     def migrate(cls) -> None:
-        cls.migrate_db_records("providers", "provider_name")  # large table
-        cls.migrate_db_records("provider_models", "provider_name")
-        cls.migrate_db_records("provider_orders", "provider_name")
-        cls.migrate_db_records("tenant_default_models", "provider_name")
-        cls.migrate_db_records("tenant_preferred_model_providers", "provider_name")
-        cls.migrate_db_records("provider_model_settings", "provider_name")
-        cls.migrate_db_records("load_balancing_model_configs", "provider_name")
+        cls.migrate_db_records("providers", "provider_name", ModelProviderID)  # large table
+        cls.migrate_db_records("provider_models", "provider_name", ModelProviderID)
+        cls.migrate_db_records("provider_orders", "provider_name", ModelProviderID)
+        cls.migrate_db_records("tenant_default_models", "provider_name", ModelProviderID)
+        cls.migrate_db_records("tenant_preferred_model_providers", "provider_name", ModelProviderID)
+        cls.migrate_db_records("provider_model_settings", "provider_name", ModelProviderID)
+        cls.migrate_db_records("load_balancing_model_configs", "provider_name", ModelProviderID)
         cls.migrate_datasets()
-        cls.migrate_db_records("embeddings", "provider_name")  # large table
-        cls.migrate_db_records("dataset_collection_bindings", "provider_name")
-        cls.migrate_db_records("tool_builtin_providers", "provider")
+        cls.migrate_db_records("embeddings", "provider_name", ModelProviderID)  # large table
+        cls.migrate_db_records("dataset_collection_bindings", "provider_name", ModelProviderID)
+        cls.migrate_db_records("tool_builtin_providers", "provider_name", ToolProviderID)
 
     @classmethod
     def migrate_datasets(cls) -> None:
@@ -66,9 +66,10 @@ limit 1000"""
                         fg="white",
                     )
                 )
-                retrieval_model["reranking_model"]["reranking_provider_name"] = (
-                    f"{DEFAULT_PLUGIN_ID}/{retrieval_model['reranking_model']['reranking_provider_name']}/{retrieval_model['reranking_model']['reranking_provider_name']}"
-                )
+                # update google to langgenius/gemini/google etc.
+                retrieval_model["reranking_model"]["reranking_provider_name"] = ModelProviderID(
+                    retrieval_model["reranking_model"]["reranking_provider_name"]
+                ).to_string()
                 retrieval_model_changed = True
 
                 click.echo(
@@ -86,9 +87,11 @@ limit 1000"""
                     update_retrieval_model_sql = ", retrieval_model = :retrieval_model"
                     params["retrieval_model"] = json.dumps(retrieval_model)
 
+                params["provider_name"] = ModelProviderID(provider_name).to_string()
+
                 sql = f"""update {table_name}
                 set {provider_column_name} =
-                concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name})
+                :provider_name
                 {update_retrieval_model_sql}
                 where id = :record_id"""
                 conn.execute(db.text(sql), params)
@@ -122,7 +125,9 @@ limit 1000"""
         )
 
     @classmethod
-    def migrate_db_records(cls, table_name: str, provider_column_name: str) -> None:
+    def migrate_db_records(
+        cls, table_name: str, provider_column_name: str, provider_cls: type[GenericProviderID]
+    ) -> None:
         click.echo(click.style(f"Migrating [{table_name}] data for plugin", fg="white"))
 
         processed_count = 0
@@ -166,7 +171,8 @@ limit 1000"""
                     )
 
                 try:
-                    updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}"
+                    # update jina to langgenius/jina_tool/jina etc.
+                    updated_value = provider_cls(provider_name).to_string()
                     batch_updates.append((updated_value, record_id))
                 except Exception as e:
                     failed_ids.append(record_id)
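The effect of threading `provider_cls` through `migrate_db_records` is that known providers are remapped by the ID classes instead of being blindly prefixed with `f"{DEFAULT_PLUGIN_ID}/{name}/{name}"`. Going by the inline comments in these hunks:

```python
# Expected mappings per the comments above ("update google to
# langgenius/gemini/google etc.", "update jina to langgenius/jina_tool/jina
# etc."); unknown names presumably still get the default form.
from core.plugin.entities.plugin import ModelProviderID, ToolProviderID

print(ModelProviderID("google").to_string())  # -> langgenius/gemini/google
print(ToolProviderID("jina").to_string())     # -> langgenius/jina_tool/jina
```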
@@ -5,7 +5,7 @@ from sqlalchemy import and_, func, or_, select
 from sqlalchemy.orm import Session
 
 from core.workflow.entities.workflow_execution import WorkflowExecutionStatus
-from models import App, EndUser, WorkflowAppLog, WorkflowRun
+from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun
 from models.enums import CreatorUserRole
@@ -21,6 +21,8 @@ class WorkflowAppService:
         created_at_after: datetime | None = None,
         page: int = 1,
         limit: int = 20,
+        created_by_end_user_session_id: str | None = None,
+        created_by_account: str | None = None,
     ) -> dict:
         """
         Get paginate workflow app logs using SQLAlchemy 2.0 style
@@ -32,6 +34,8 @@ class WorkflowAppService:
         :param created_at_after: filter logs created after this timestamp
         :param page: page number
         :param limit: items per page
+        :param created_by_end_user_session_id: filter by end user session id
+        :param created_by_account: filter by account email
         :return: Pagination object
         """
         # Build base statement using SQLAlchemy 2.0 style
@@ -71,6 +75,26 @@ class WorkflowAppService:
         if created_at_after:
             stmt = stmt.where(WorkflowAppLog.created_at >= created_at_after)
 
+        # Filter by end user session id or account email
+        if created_by_end_user_session_id:
+            stmt = stmt.join(
+                EndUser,
+                and_(
+                    WorkflowAppLog.created_by == EndUser.id,
+                    WorkflowAppLog.created_by_role == CreatorUserRole.END_USER,
+                    EndUser.session_id == created_by_end_user_session_id,
+                ),
+            )
+        if created_by_account:
+            stmt = stmt.join(
+                Account,
+                and_(
+                    WorkflowAppLog.created_by == Account.id,
+                    WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT,
+                    Account.email == created_by_account,
+                ),
+            )
+
         stmt = stmt.order_by(WorkflowAppLog.created_at.desc())
 
         # Get total count using the same filters
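Both new filters join on `WorkflowAppLog.created_by` but discriminate on `created_by_role`, so an end-user session id and an account email can never match the same row. A hedged sketch of a call into this service — the method and first-argument names are not visible in these hunks and are assumed from the controller code above:

```python
# Sketch only; the method name and app_model parameter name are assumptions.
service = WorkflowAppService()
logs = service.get_paginate_workflow_app_logs(
    app_model=app,                        # an App row
    created_by_account="lizb@test.com",   # example email from the API docs below
    page=1,
    limit=20,
)
```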
api/tests/integration_tests/vdb/matrixone/test_matrixone.py (new file, 25 lines)
@@ -0,0 +1,25 @@
from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector
from tests.integration_tests.vdb.test_vector_store import (
    AbstractVectorTest,
    get_example_text,
    setup_mock_redis,
)


class MatrixoneVectorTest(AbstractVectorTest):
    def __init__(self):
        super().__init__()
        self.vector = MatrixoneVector(
            collection_name=self.collection_name,
            config=MatrixoneConfig(
                host="localhost", port=6001, user="dump", password="111", database="dify", metric="l2"
            ),
        )

    def get_ids_by_metadata_field(self):
        ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id)
        assert len(ids) == 1


def test_matrixone_vector(setup_mock_redis):
    MatrixoneVectorTest().run_all_tests()
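Running this integration test needs a reachable Matrixone on localhost:6001 (the docker compose service added later in this diff provides one). One way to invoke it:

```python
# Sketch: drive the test file above through pytest's Python entry point.
import pytest

pytest.main(["api/tests/integration_tests/vdb/matrixone/test_matrixone.py", "-v"])
```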
api/uv.lock (generated, 4458 lines changed; diff suppressed because it is too large)
@@ -399,7 +399,7 @@ SUPABASE_URL=your-server-url
 # ------------------------------
 
 # The type of vector store to use.
-# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`.
+# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
 VECTOR_STORE=weaviate
 
 # The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
@@ -490,6 +490,13 @@ TIDB_VECTOR_USER=
 TIDB_VECTOR_PASSWORD=
 TIDB_VECTOR_DATABASE=dify
 
+# Matrixone vector configurations.
+MATRIXONE_HOST=matrixone
+MATRIXONE_PORT=6001
+MATRIXONE_USER=dump
+MATRIXONE_PASSWORD=111
+MATRIXONE_DATABASE=dify
+
 # Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant`
 TIDB_ON_QDRANT_URL=http://127.0.0.1
 TIDB_ON_QDRANT_API_KEY=dify
@@ -719,10 +726,11 @@ NOTION_INTERNAL_SECRET=
 # Mail related configuration
 # ------------------------------
 
-# Mail type, support: resend, smtp
+# Mail type, support: resend, smtp, sendgrid
 MAIL_TYPE=resend
 
 # Default send from email address, if not specified
+# If using SendGrid, use the 'from' field for authentication if necessary.
 MAIL_DEFAULT_SEND_FROM=
 
 # API-Key for the Resend email provider, used when MAIL_TYPE is `resend`.
@@ -738,6 +746,9 @@ SMTP_PASSWORD=
 SMTP_USE_TLS=true
 SMTP_OPPORTUNISTIC_TLS=false
 
+# Sendgid configuration
+SENDGRID_API_KEY=
+
 # ------------------------------
 # Others Configuration
 # ------------------------------
@@ -617,6 +617,18 @@ services:
     ports:
       - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}
 
+  # Matrixone vector store.
+  matrixone:
+    hostname: matrixone
+    image: matrixorigin/matrixone:2.1.1
+    profiles:
+      - matrixone
+    restart: always
+    volumes:
+      - ./volumes/matrixone/data:/mo-data
+    ports:
+      - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}
+
   # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
   # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
   elasticsearch:
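After `docker compose --profile matrixone up -d`, the service listens on port 6001 with the credentials wired through the shared env block below. A quick connectivity check, assuming pymysql is installed (it is the driver the mo-vector client's `mysql+pymysql://` connection string uses):

```python
# Sketch only: verifies the matrixone container answers MySQL-protocol queries.
import pymysql

conn = pymysql.connect(host="127.0.0.1", port=6001, user="dump",
                       password="111", database="dify")
with conn.cursor() as cur:
    cur.execute("SELECT version()")
    print(cur.fetchone())
conn.close()
```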
@@ -195,6 +195,11 @@ x-shared-env: &shared-api-worker-env
   TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
   TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
   TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
+  MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone}
+  MATRIXONE_PORT: ${MATRIXONE_PORT:-6001}
+  MATRIXONE_USER: ${MATRIXONE_USER:-dump}
+  MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111}
+  MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify}
   TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1}
   TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify}
   TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20}
@@ -322,6 +327,7 @@ x-shared-env: &shared-api-worker-env
   SMTP_PASSWORD: ${SMTP_PASSWORD:-}
   SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
   SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
+  SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
   INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
   INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
   RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
@@ -1124,6 +1130,18 @@ services:
     ports:
       - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123}
 
+  # Matrixone vector store.
+  matrixone:
+    hostname: matrixone
+    image: matrixorigin/matrixone:2.1.1
+    profiles:
+      - matrixone
+    restart: always
+    volumes:
+      - ./volumes/matrixone/data:/mo-data
+    ports:
+      - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001}
+
   # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html
   # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites
   elasticsearch:
@@ -81,7 +81,7 @@ const Datasets = ({
       currentContainer?.removeEventListener('scroll', onScroll)
       onScroll.cancel()
     }
-  }, [onScroll])
+  }, [containerRef, onScroll])
 
   return (
     <nav className='grid shrink-0 grow grid-cols-1 content-start gap-4 px-12 pt-2 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4'>
@@ -5,34 +5,34 @@ import {
   RiAddLine,
   RiArrowRightLine,
 } from '@remixicon/react'
+import Link from 'next/link'
 
-const CreateAppCard = (
-  {
-    ref,
-    ..._
-  },
-) => {
+type CreateAppCardProps = {
+  ref?: React.Ref<HTMLAnchorElement>
+}
+
+const CreateAppCard = ({ ref }: CreateAppCardProps) => {
   const { t } = useTranslation()
 
   return (
     <div className='bg-background-default-dimm flex min-h-[160px] flex-col rounded-xl border-[0.5px]
     border-components-panel-border transition-all duration-200 ease-in-out'
     >
-      <a ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
+      <Link ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
        <div className='flex items-center gap-3'>
          <div className='flex h-10 w-10 items-center justify-center rounded-lg border border-dashed border-divider-regular bg-background-default-lighter
          p-2 group-hover:border-solid group-hover:border-effects-highlight group-hover:bg-background-default-dodge'
          >
-            <RiAddLine className='h-4 w-4 text-text-tertiary group-hover:text-text-accent'/>
+            <RiAddLine className='h-4 w-4 text-text-tertiary group-hover:text-text-accent' />
          </div>
          <div className='system-md-semibold text-text-secondary group-hover:text-text-accent'>{t('dataset.createDataset')}</div>
        </div>
-      </a>
+      </Link>
       <div className='system-xs-regular p-4 pt-0 text-text-tertiary'>{t('dataset.createDatasetIntro')}</div>
-      <a className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
+      <Link className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
         <div className='system-xs-medium text-text-tertiary group-hover:text-text-accent'>{t('dataset.connectDataset')}</div>
         <RiArrowRightLine className='h-3.5 w-3.5 text-text-tertiary group-hover:text-text-accent' />
-      </a>
+      </Link>
     </div>
   )
 }
@@ -8,15 +8,17 @@ import { useRouter } from 'next/navigation'
 import { useEffect } from 'react'
 
 export default function DatasetsLayout({ children }: { children: React.ReactNode }) {
-  const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator } = useAppContext()
+  const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, currentWorkspace, isLoadingCurrentWorkspace } = useAppContext()
   const router = useRouter()
 
   useEffect(() => {
-    if (!isCurrentWorkspaceEditor && !isCurrentWorkspaceDatasetOperator)
+    if (isLoadingCurrentWorkspace || !currentWorkspace.id)
+      return
+    if (!(isCurrentWorkspaceEditor || isCurrentWorkspaceDatasetOperator))
       router.replace('/apps')
-  }, [isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, router])
+  }, [isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace, currentWorkspace, router])
 
-  if (!isCurrentWorkspaceEditor && !isCurrentWorkspaceDatasetOperator)
+  if (isLoadingCurrentWorkspace || !(isCurrentWorkspaceEditor || isCurrentWorkspaceDatasetOperator))
     return <Loading type='app' />
   return (
     <ExternalKnowledgeApiProvider>
@@ -1,4 +1,4 @@
-import { memo, useEffect, useMemo, useRef, useState } from 'react'
+import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
 import ReactEcharts from 'echarts-for-react'
 import SyntaxHighlighter from 'react-syntax-highlighter'
 import {
@@ -62,6 +62,17 @@ const getCorrectCapitalizationLanguageName = (language: string) => {
 // visit https://reactjs.org/docs/error-decoder.html?invariant=185 for the full message
 // or use the non-minified dev environment for full errors and additional helpful warnings.
 
+// Define ECharts event parameter types
+interface EChartsEventParams {
+  type: string;
+  seriesIndex?: number;
+  dataIndex?: number;
+  name?: string;
+  value?: any;
+  currentIndex?: number; // Added for timeline events
+  [key: string]: any;
+}
+
 const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any) => {
   const { theme } = useTheme()
   const [isSVG, setIsSVG] = useState(true)
@@ -70,6 +81,11 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
   const echartsRef = useRef<any>(null)
   const contentRef = useRef<string>('')
   const processedRef = useRef<boolean>(false) // Track if content was successfully processed
+  const instanceIdRef = useRef<string>(`chart-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`) // Unique ID for logging
+  const isInitialRenderRef = useRef<boolean>(true) // Track if this is initial render
+  const chartInstanceRef = useRef<any>(null) // Direct reference to ECharts instance
+  const resizeTimerRef = useRef<NodeJS.Timeout | null>(null) // For debounce handling
+  const finishedEventCountRef = useRef<number>(0) // Track finished event trigger count
   const match = /language-(\w+)/.exec(className || '')
   const language = match?.[1]
   const languageShowName = getCorrectCapitalizationLanguageName(language || '')
@@ -85,36 +101,64 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
     width: 'auto',
   }) as any, [])
 
-  const echartsOnEvents = useMemo(() => ({
-    finished: () => {
-      const instance = echartsRef.current?.getEchartsInstance?.()
-      if (instance)
-        instance.resize()
+  // Debounce resize operations
+  const debouncedResize = useCallback(() => {
+    if (resizeTimerRef.current)
+      clearTimeout(resizeTimerRef.current)
+
+    resizeTimerRef.current = setTimeout(() => {
+      if (chartInstanceRef.current)
+        chartInstanceRef.current.resize()
+      resizeTimerRef.current = null
+    }, 200)
+  }, [])
+
+  // Handle ECharts instance initialization
+  const handleChartReady = useCallback((instance: any) => {
+    chartInstanceRef.current = instance
+
+    // Force resize to ensure timeline displays correctly
+    setTimeout(() => {
+      if (chartInstanceRef.current)
+        chartInstanceRef.current.resize()
+    }, 200)
+  }, [])
+
+  // Store event handlers in useMemo to avoid recreating them
+  const echartsEvents = useMemo(() => ({
+    finished: (params: EChartsEventParams) => {
+      // Limit finished event frequency to avoid infinite loops
+      finishedEventCountRef.current++
+      if (finishedEventCountRef.current > 3) {
+        // Stop processing after 3 times to avoid infinite loops
+        return
+      }
+
+      if (chartInstanceRef.current) {
+        // Use debounced resize
+        debouncedResize()
+      }
     },
-  }), [echartsRef]) // echartsRef is stable, so this effectively runs once.
+  }), [debouncedResize])
 
   // Handle container resize for echarts
   useEffect(() => {
-    if (language !== 'echarts' || !echartsRef.current) return
+    if (language !== 'echarts' || !chartInstanceRef.current) return
 
     const handleResize = () => {
-      // This gets the echarts instance from the component
-      const instance = echartsRef.current?.getEchartsInstance?.()
-      if (instance)
-        instance.resize()
+      if (chartInstanceRef.current)
+        // Use debounced resize
+        debouncedResize()
    }
 
     window.addEventListener('resize', handleResize)
 
     // Also manually trigger resize after a short delay to ensure proper sizing
     const resizeTimer = setTimeout(handleResize, 200)
 
     return () => {
       window.removeEventListener('resize', handleResize)
       clearTimeout(resizeTimer)
+      if (resizeTimerRef.current)
+        clearTimeout(resizeTimerRef.current)
     }
-  }, [language, echartsRef.current])
-
+  }, [language, debouncedResize])
   // Process chart data when content changes
   useEffect(() => {
     // Only process echarts content
@@ -222,6 +266,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
     }
   }, [language, children])
 
+  // Cache rendered content to avoid unnecessary re-renders
   const renderCodeContent = useMemo(() => {
     const content = String(children).replace(/\n$/, '')
     switch (language) {
@@ -274,6 +319,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
 
   // Success state: show the chart
   if (chartState === 'success' && finalChartOption) {
+    // Reset finished event counter
+    finishedEventCountRef.current = 0
+
     return (
       <div style={{
         minWidth: '300px',
@@ -286,13 +334,20 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
       }}>
         <ErrorBoundary>
           <ReactEcharts
-            ref={echartsRef}
+            ref={(e) => {
+              if (e && isInitialRenderRef.current) {
+                echartsRef.current = e
+                isInitialRenderRef.current = false
+              }
+            }}
             option={finalChartOption}
             style={echartsStyle}
             theme={isDarkMode ? 'dark' : undefined}
             opts={echartsOpts}
-            notMerge={true}
-            onEvents={echartsOnEvents}
+            notMerge={false}
+            lazyUpdate={false}
+            onEvents={echartsEvents}
+            onChartReady={handleChartReady}
           />
         </ErrorBoundary>
       </div>
@@ -363,7 +418,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
       </SyntaxHighlighter>
     )
   }
-}, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, echartsOnEvents])
+}, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, handleChartReady, echartsEvents])
 
 if (inline || !match)
   return <code {...props} className={className}>{children}</code>
@@ -533,6 +533,12 @@ Workflow applications offers non-session support and is ideal for translation, a
     <Property name='limit' type='int' key='limit'>
       How many chat history messages to return in one request, default is 20.
     </Property>
+    <Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
+      Created by which endUser, for example, `abc-123`.
+    </Property>
+    <Property name='created_by_account' type='str' key='created_by_account'>
+      Created by which email account, for example, lizb@test.com.
+    </Property>
   </Properties>
 
 ### Response
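For instance, a logs query filtered by the new parameters might look like this; the base URL, endpoint path, and API key are placeholders to adapt to your deployment:

```python
# Sketch: query workflow app logs with the two new filters.
import requests

resp = requests.get(
    "https://api.dify.ai/v1/workflows/logs",  # path assumed from the surrounding docs
    headers={"Authorization": "Bearer app-xxxx"},
    params={
        "created_by_end_user_session_id": "abc-123",
        "created_by_account": "lizb@test.com",
        "page": 1,
        "limit": 20,
    },
)
print(resp.json())
```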
@@ -534,6 +534,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     <Property name='limit' type='int' key='limit'>
       1回のリクエストで返すチャット履歴メッセージの数、デフォルトは20。
     </Property>
+    <Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
+      どのendUserによって作成されたか、例えば、`abc-123`。
+    </Property>
+    <Property name='created_by_account' type='str' key='created_by_account'>
+      どのメールアカウントによって作成されたか、例えば、lizb@test.com。
+    </Property>
   </Properties>
 
 ### 応答
@@ -522,6 +522,12 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
     <Property name='limit' type='int' key='limit'>
       每页条数, 默认20.
     </Property>
+    <Property name='created_by_end_user_session_id' type='str' key='created_by_end_user_session_id'>
+      由哪个endUser创建,例如,`abc-123`.
+    </Property>
+    <Property name='created_by_account' type='str' key='created_by_account'>
+      由哪个邮箱账户创建,例如,lizb@test.com.
+    </Property>
   </Properties>
 
 ### Response
@@ -12,6 +12,7 @@ import {
   useWorkflowRun,
   useWorkflowStartRun,
 } from '../hooks'
+import { useWorkflowStore } from '@/app/components/workflow/store'
 
 type WorkflowMainProps = Pick<WorkflowProps, 'nodes' | 'edges' | 'viewport'>
 const WorkflowMain = ({
@@ -20,14 +21,28 @@ const WorkflowMain = ({
   viewport,
 }: WorkflowMainProps) => {
   const featuresStore = useFeaturesStore()
+  const workflowStore = useWorkflowStore()
 
   const handleWorkflowDataUpdate = useCallback((payload: any) => {
-    if (payload.features && featuresStore) {
+    const {
+      features,
+      conversation_variables,
+      environment_variables,
+    } = payload
+    if (features && featuresStore) {
       const { setFeatures } = featuresStore.getState()
 
-      setFeatures(payload.features)
+      setFeatures(features)
     }
-  }, [featuresStore])
+    if (conversation_variables) {
+      const { setConversationVariables } = workflowStore.getState()
+      setConversationVariables(conversation_variables)
+    }
+    if (environment_variables) {
+      const { setEnvironmentVariables } = workflowStore.getState()
+      setEnvironmentVariables(environment_variables)
+    }
+  }, [featuresStore, workflowStore])
 
   const {
     doSyncWorkflowDraft,
@@ -37,6 +37,7 @@ const typeList = [
   ChatVarType.ArrayString,
   ChatVarType.ArrayNumber,
   ChatVarType.ArrayObject,
+  ChatVarType.ArrayFile,
 ]
 
 const objectPlaceholder = `# example
@@ -127,6 +128,7 @@ const ChatVariableModal = ({
     case ChatVarType.ArrayString:
     case ChatVarType.ArrayNumber:
     case ChatVarType.ArrayObject:
+    case ChatVarType.ArrayFile:
       return value?.filter(Boolean) || []
   }
 }
@@ -294,84 +296,86 @@ const ChatVariableModal = ({
           </div>
         </div>
         {/* default value */}
-        <div className='mb-4'>
-          <div className='system-sm-semibold mb-1 flex h-6 items-center justify-between text-text-secondary'>
-            <div>{t('workflow.chatVariable.modal.value')}</div>
-            {(type === ChatVarType.ArrayString || type === ChatVarType.ArrayNumber) && (
-              <Button
-                variant='ghost'
-                size='small'
-                className='text-text-tertiary'
-                onClick={() => handleEditorChange(!editInJSON)}
-              >
-                {editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
-                {editInJSON ? t('workflow.chatVariable.modal.oneByOne') : t('workflow.chatVariable.modal.editInJSON')}
-              </Button>
-            )}
-            {type === ChatVarType.Object && (
-              <Button
-                variant='ghost'
-                size='small'
-                className='text-text-tertiary'
-                onClick={() => handleEditorChange(!editInJSON)}
-              >
-                {editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
-                {editInJSON ? t('workflow.chatVariable.modal.editInForm') : t('workflow.chatVariable.modal.editInJSON')}
-              </Button>
-            )}
-          </div>
-          <div className='flex'>
-            {type === ChatVarType.String && (
-              // Input will remove \n\r, so use Textarea just like description area
-              <textarea
-                className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
-                value={value}
-                placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
-                onChange={e => setValue(e.target.value)}
-              />
-            )}
-            {type === ChatVarType.Number && (
-              <Input
-                placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
-                value={value}
-                onChange={e => setValue(Number(e.target.value))}
-                type='number'
-              />
-            )}
-            {type === ChatVarType.Object && !editInJSON && (
-              <ObjectValueList
-                list={objectValue}
-                onChange={setObjectValue}
-              />
-            )}
-            {type === ChatVarType.ArrayString && !editInJSON && (
-              <ArrayValueList
-                isString
-                list={value || [undefined]}
-                onChange={setValue}
-              />
-            )}
-            {type === ChatVarType.ArrayNumber && !editInJSON && (
-              <ArrayValueList
-                isString={false}
-                list={value || [undefined]}
-                onChange={setValue}
-              />
-            )}
-            {editInJSON && (
-              <div className='w-full rounded-[10px] bg-components-input-bg-normal py-2 pl-3 pr-1' style={{ height: editorMinHeight }}>
-                <CodeEditor
-                  isExpand
-                  noWrapper
-                  language={CodeLanguage.json}
-                  value={editorContent}
-                  placeholder={<div className='whitespace-pre'>{placeholder}</div>}
-                  onChange={handleEditorValueChange}
+        {type !== ChatVarType.ArrayFile && (
+          <div className='mb-4'>
+            <div className='system-sm-semibold mb-1 flex h-6 items-center justify-between text-text-secondary'>
+              <div>{t('workflow.chatVariable.modal.value')}</div>
+              {(type === ChatVarType.ArrayString || type === ChatVarType.ArrayNumber) && (
+                <Button
+                  variant='ghost'
+                  size='small'
+                  className='text-text-tertiary'
+                  onClick={() => handleEditorChange(!editInJSON)}
+                >
+                  {editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}

(diff truncated here in the source view)
|
||||
{editInJSON ? t('workflow.chatVariable.modal.oneByOne') : t('workflow.chatVariable.modal.editInJSON')}
|
||||
</Button>
|
||||
)}
|
||||
{type === ChatVarType.Object && (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='small'
|
||||
className='text-text-tertiary'
|
||||
onClick={() => handleEditorChange(!editInJSON)}
|
||||
>
|
||||
{editInJSON ? <RiInputField className='mr-1 h-3.5 w-3.5' /> : <RiDraftLine className='mr-1 h-3.5 w-3.5' />}
|
||||
{editInJSON ? t('workflow.chatVariable.modal.editInForm') : t('workflow.chatVariable.modal.editInJSON')}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<div className='flex'>
|
||||
{type === ChatVarType.String && (
|
||||
// Input will remove \n\r, so use Textarea just like description area
|
||||
<textarea
|
||||
className='system-sm-regular placeholder:system-sm-regular block h-20 w-full resize-none appearance-none rounded-lg border border-transparent bg-components-input-bg-normal p-2 caret-primary-600 outline-none placeholder:text-components-input-text-placeholder hover:border-components-input-border-hover hover:bg-components-input-bg-hover focus:border-components-input-border-active focus:bg-components-input-bg-active focus:shadow-xs'
|
||||
value={value}
|
||||
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
|
||||
onChange={e => setValue(e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
{type === ChatVarType.Number && (
|
||||
<Input
|
||||
placeholder={t('workflow.chatVariable.modal.valuePlaceholder') || ''}
|
||||
value={value}
|
||||
onChange={e => setValue(Number(e.target.value))}
|
||||
type='number'
|
||||
/>
|
||||
)}
|
||||
{type === ChatVarType.Object && !editInJSON && (
|
||||
<ObjectValueList
|
||||
list={objectValue}
|
||||
onChange={setObjectValue}
|
||||
/>
|
||||
)}
|
||||
{type === ChatVarType.ArrayString && !editInJSON && (
|
||||
<ArrayValueList
|
||||
isString
|
||||
list={value || [undefined]}
|
||||
onChange={setValue}
|
||||
/>
|
||||
)}
|
||||
{type === ChatVarType.ArrayNumber && !editInJSON && (
|
||||
<ArrayValueList
|
||||
isString={false}
|
||||
list={value || [undefined]}
|
||||
onChange={setValue}
|
||||
/>
|
||||
)}
|
||||
{editInJSON && (
|
||||
<div className='w-full rounded-[10px] bg-components-input-bg-normal py-2 pl-3 pr-1' style={{ height: editorMinHeight }}>
|
||||
<CodeEditor
|
||||
isExpand
|
||||
noWrapper
|
||||
language={CodeLanguage.json}
|
||||
value={editorContent}
|
||||
placeholder={<div className='whitespace-pre'>{placeholder}</div>}
|
||||
onChange={handleEditorValueChange}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
{/* description */}
|
||||
<div className=''>
|
||||
<div className='system-sm-semibold mb-1 flex h-6 items-center text-text-secondary'>{t('workflow.chatVariable.modal.description')}</div>
|
||||
|
||||
@@ -5,4 +5,5 @@ export enum ChatVarType {
  ArrayString = 'array[string]',
  ArrayNumber = 'array[number]',
  ArrayObject = 'array[object]',
  ArrayFile = 'array[file]',
}

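A short sketch of how the new `ArrayFile` member slots into the existing default-value handling: all array-typed variables share the same fall-through branch, which drops empty slots. The helper name below is illustrative, not from the source.

```ts
enum ChatVarType {
  String = 'string',
  Number = 'number',
  Object = 'object',
  ArrayString = 'array[string]',
  ArrayNumber = 'array[number]',
  ArrayObject = 'array[object]',
  ArrayFile = 'array[file]',
}

// Mirrors the switch fall-through in ChatVariableModal: every array type,
// including the new ArrayFile, is normalized by dropping empty slots.
const formatArrayValue = (value?: unknown[]) => value?.filter(Boolean) || []

console.log(formatArrayValue(['a.pdf', undefined, 'b.png'])) // ['a.pdf', 'b.png']
console.log(formatArrayValue(undefined)) // []
```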
@@ -86,6 +86,8 @@ const UpdateDSLModal = ({
      graph,
      features,
      hash,
      conversation_variables,
      environment_variables,
    } = await fetchWorkflowDraft(`/apps/${app_id}/workflows/draft`)

    const { nodes, edges, viewport } = graph
@@ -122,6 +124,8 @@ const UpdateDSLModal = ({
        viewport,
        features: newFeatures,
        hash,
        conversation_variables: conversation_variables || [],
        environment_variables: environment_variables || [],
      },
    } as any)
  }, [eventEmitter])

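The `|| []` fallbacks matter because drafts exported before these fields existed may omit them, while downstream consumers expect arrays. A tiny sketch of the normalization, using a hypothetical minimal type (only the two field names come from the diff):

```ts
// Hypothetical minimal draft type for illustration.
type DraftVariables = {
  conversation_variables?: unknown[]
  environment_variables?: unknown[]
}

const withVariableDefaults = (draft: DraftVariables) => ({
  conversation_variables: draft.conversation_variables || [],
  environment_variables: draft.environment_variables || [],
})

console.log(withVariableDefaults({}))
// { conversation_variables: [], environment_variables: [] }
```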
@@ -19,19 +19,87 @@ import { CUSTOM_ITERATION_START_NODE } from '../nodes/iteration-start/constants'
import { CUSTOM_LOOP_START_NODE } from '../nodes/loop-start/constants'

export const getLayoutByDagre = (originNodes: Node[], originEdges: Edge[]) => {
  const dagreGraph = new dagre.graphlib.Graph()
  const dagreGraph = new dagre.graphlib.Graph({ compound: true })
  dagreGraph.setDefaultEdgeLabel(() => ({}))

  const nodes = cloneDeep(originNodes).filter(node => !node.parentId && node.type === CUSTOM_NODE)
  const edges = cloneDeep(originEdges).filter(edge => (!edge.data?.isInIteration && !edge.data?.isInLoop))

  // The default dagre layout algorithm often fails to correctly order the branches
  // of an If/Else node, leading to crossed edges.
  //
  // To solve this, we employ a "virtual container" strategy:
  // 1. A virtual, compound parent node (the "container") is created for each If/Else node's branches.
  // 2. Each direct child of the If/Else node is preceded by a virtual dummy node. These dummies are placed inside the container.
  // 3. A rigid, sequential chain of invisible edges is created between these dummy nodes (e.g., dummy_IF -> dummy_ELIF -> dummy_ELSE).
  //
  // This forces dagre to treat the ordered branches as an unbreakable, atomic group,
  // ensuring their layout respects the intended logical sequence.
  const ifElseNodes = nodes.filter(node => node.data.type === BlockEnum.IfElse)
  let virtualLogicApplied = false

  ifElseNodes.forEach((ifElseNode) => {
    const childEdges = edges.filter(e => e.source === ifElseNode.id)
    if (childEdges.length <= 1)
      return

    virtualLogicApplied = true
    const sortedChildEdges = childEdges.sort((edgeA, edgeB) => {
      const handleA = edgeA.sourceHandle
      const handleB = edgeB.sourceHandle

      if (handleA && handleB) {
        const cases = (ifElseNode.data as any).cases || []
        const isAElse = handleA === 'false'
        const isBElse = handleB === 'false'

        if (isAElse) return 1
        if (isBElse) return -1

        const indexA = cases.findIndex((c: any) => c.case_id === handleA)
        const indexB = cases.findIndex((c: any) => c.case_id === handleB)

        if (indexA !== -1 && indexB !== -1)
          return indexA - indexB
      }
      return 0
    })

    const parentDummyId = `dummy-parent-${ifElseNode.id}`
    dagreGraph.setNode(parentDummyId, { width: 1, height: 1 })

    const dummyNodes: string[] = []
    sortedChildEdges.forEach((edge) => {
      const dummyNodeId = `dummy-${edge.source}-${edge.target}`
      dummyNodes.push(dummyNodeId)
      dagreGraph.setNode(dummyNodeId, { width: 1, height: 1 })
      dagreGraph.setParent(dummyNodeId, parentDummyId)

      const edgeIndex = edges.findIndex(e => e.id === edge.id)
      if (edgeIndex > -1)
        edges.splice(edgeIndex, 1)

      edges.push({ id: `e-${edge.source}-${dummyNodeId}`, source: edge.source, target: dummyNodeId, sourceHandle: edge.sourceHandle } as Edge)
      edges.push({ id: `e-${dummyNodeId}-${edge.target}`, source: dummyNodeId, target: edge.target, targetHandle: edge.targetHandle } as Edge)
    })

    for (let i = 0; i < dummyNodes.length - 1; i++) {
      const sourceDummy = dummyNodes[i]
      const targetDummy = dummyNodes[i + 1]
      edges.push({ id: `e-dummy-${sourceDummy}-${targetDummy}`, source: sourceDummy, target: targetDummy } as Edge)
    }
  })

  dagreGraph.setGraph({
    rankdir: 'LR',
    align: 'UL',
    nodesep: 40,
    ranksep: 60,
    ranksep: virtualLogicApplied ? 30 : 60,
    ranker: 'tight-tree',
    marginx: 30,
    marginy: 200,
  })

  nodes.forEach((node) => {
    dagreGraph.setNode(node.id, {
      width: node.width!,

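A standalone sketch of the "virtual container" trick described in the comments above: one compound parent, one 1x1 dummy node per branch, and a rigid dummy-to-dummy chain that pins the branch order. Node names and sizes are illustrative, and it assumes the same `dagre` npm package the layout utility imports.

```ts
import dagre from 'dagre' // assumption: the dagre package used by the layout util

const g = new dagre.graphlib.Graph({ compound: true })
g.setGraph({ rankdir: 'LR', nodesep: 40, ranksep: 30, ranker: 'tight-tree' })
g.setDefaultEdgeLabel(() => ({}))

// An If/Else node with three branch targets (IF, ELIF, ELSE).
for (const id of ['ifElse', 'onIf', 'onElif', 'onElse'])
  g.setNode(id, { width: 100, height: 50 })

// Virtual container plus one tiny dummy per branch, in the desired order.
g.setNode('container', { width: 1, height: 1 })
for (const d of ['d-if', 'd-elif', 'd-else']) {
  g.setNode(d, { width: 1, height: 1 })
  g.setParent(d, 'container')
}

// Route each branch through its dummy...
g.setEdge('ifElse', 'd-if'); g.setEdge('d-if', 'onIf')
g.setEdge('ifElse', 'd-elif'); g.setEdge('d-elif', 'onElif')
g.setEdge('ifElse', 'd-else'); g.setEdge('d-else', 'onElse')
// ...and chain the dummies so the crossing-reduction pass cannot shuffle them.
g.setEdge('d-if', 'd-elif')
g.setEdge('d-elif', 'd-else')

dagre.layout(g)
for (const id of ['onIf', 'onElif', 'onElse'])
  console.log(id, g.node(id).x, g.node(id).y)
// With the chain in place, the branch targets keep their logical order.
```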
@@ -1,7 +1,7 @@
export * from './node'
export * from './edge'
export * from './workflow-init'
export * from './layout'
export * from './dagre-layout'
export * from './common'
export * from './tool'
export * from './workflow'

@@ -154,10 +154,6 @@ export const ProviderContextProvider = ({
        setIsFetchedPlan(true)
      }

      if (data.model_load_balancing_enabled)
        setModelLoadBalancingEnabled(true)
      if (data.dataset_operator_enabled)
        setDatasetOperatorEnabled(true)
      if (data.model_load_balancing_enabled)
        setModelLoadBalancingEnabled(true)
      if (data.dataset_operator_enabled)

@@ -137,6 +137,7 @@ const translation = {
    readyToInstall: 'Über die Installation des folgenden Plugins',
    dropPluginToInstall: 'Legen Sie das Plugin-Paket hier ab, um es zu installieren',
    next: 'Nächster',
    installWarning: 'Dieses Plugin darf nicht installiert werden.',
  },
  installFromGitHub: {
    selectPackagePlaceholder: 'Bitte wählen Sie ein Paket aus',
@@ -173,7 +174,7 @@ const translation = {
      recentlyUpdated: 'Kürzlich aktualisiert',
    },
    viewMore: 'Mehr anzeigen',
    sortBy: 'Schwarze Stadt',
    sortBy: 'Sortieren nach',
    discover: 'Entdecken',
    noPluginFound: 'Kein Plugin gefunden',
    difyMarketplace: 'Dify Marktplatz',

@@ -137,6 +137,7 @@ const translation = {
    dropPluginToInstall: 'Suelte el paquete del complemento aquí para instalarlo',
    readyToInstallPackage: 'A punto de instalar el siguiente plugin',
    installedSuccessfully: 'Instalación exitosa',
    installWarning: 'Este plugin no está permitido para instalar.',
  },
  installFromGitHub: {
    uploadFailed: 'Error de carga',
@@ -175,7 +176,7 @@ const translation = {
    empower: 'Potencie su desarrollo de IA',
    moreFrom: 'Más de Marketplace',
    viewMore: 'Ver más',
    sortBy: 'Ciudad negra',
    sortBy: 'Ordenar por',
    noPluginFound: 'No se ha encontrado ningún plugin',
    pluginsResult: '{{num}} resultados',
    discover: 'Descubrir',

@@ -137,6 +137,7 @@ const translation = {
    next: 'Prochain',
    installPlugin: 'Installer le plugin',
    installFailedDesc: 'L’installation du plug-in a échoué.',
    installWarning: 'Ce plugin n’est pas autorisé à être installé.',
  },
  installFromGitHub: {
    installFailed: 'Échec de l’installation',

@@ -137,6 +137,7 @@ const translation = {
    installing: 'Installazione...',
    install: 'Installare',
    readyToInstallPackages: 'Sto per installare i seguenti plugin {{num}}',
    installWarning: 'Questo plugin non è consentito essere installato.',
  },
  installFromGitHub: {
    installedSuccessfully: 'Installazione riuscita',
@@ -178,7 +179,7 @@ const translation = {
    pluginsResult: '{{num}} risultati',
    noPluginFound: 'Nessun plug-in trovato',
    empower: 'Potenzia lo sviluppo dell\'intelligenza artificiale',
    sortBy: 'Città nera',
    sortBy: 'Ordina per',
    and: 'e',
    viewMore: 'Vedi di più',
    verifiedTip: 'Verificato da Dify',

@@ -137,6 +137,7 @@ const translation = {
    installing: 'Instalar...',
    uploadingPackage: 'Carregando {{packageName}} ...',
    dropPluginToInstall: 'Solte o pacote de plug-in aqui para instalar',
    installWarning: 'Este plugin não é permitido ser instalado.',
  },
  installFromGitHub: {
    selectVersionPlaceholder: 'Selecione uma versão',
@@ -172,7 +173,7 @@ const translation = {
      recentlyUpdated: 'Atualizado recentemente',
      newlyReleased: 'Recém-lançado',
    },
    sortBy: 'Cidade negra',
    sortBy: 'Ordenar por',
    viewMore: 'Ver mais',
    and: 'e',
    pluginsResult: '{{num}} resultados',

@@ -137,6 +137,7 @@ const translation = {
    pluginLoadErrorDesc: 'Acest plugin nu va fi instalat',
    installedSuccessfullyDesc: 'Pluginul a fost instalat cu succes.',
    readyToInstall: 'Despre instalarea următorului plugin',
    installWarning: 'Acest plugin nu este permis să fie instalat.',
  },
  installFromGitHub: {
    installFailed: 'Instalarea a eșuat',
@@ -173,7 +174,7 @@ const translation = {
      firstReleased: 'Prima lansare',
    },
    noPluginFound: 'Nu s-a găsit niciun plugin',
    sortBy: 'Orașul negru',
    sortBy: 'Sortează după',
    discover: 'Descoperi',
    empower: 'Îmbunătățește-ți dezvoltarea AI',
    pluginsResult: '{{num}} rezultate',

@@ -71,7 +71,7 @@ const translation = {
    annotated: '已標註改進({{count}} 項)',
    not_annotated: '未標註',
  },
  sortBy: '排序方式:',
  sortBy: '排序:',
  descending: '降序',
  ascending: '升序',
},

@@ -137,6 +137,7 @@ const translation = {
    cancel: '取消',
    installPlugin: '安裝插件',
    installing: '安裝。。。',
    installWarning: '此插件不允許安裝。',
  },
  installFromGitHub: {
    gitHubRepo: 'GitHub 儲存庫',
@@ -177,7 +178,7 @@ const translation = {
    empower: '為您的 AI 開發提供支援',
    moreFrom: '來自 Marketplace 的更多內容',
    and: '和',
    sortBy: '黑城',
    sortBy: '排序方式',
    viewMore: '查看更多',
    difyMarketplace: 'Dify 市場',
    pluginsResult: '{{num}} 個結果',