Compare commits

...

36 Commits

Author | SHA1 | Message | Date
John Wang | aae9658af2 | feat: upgrade xinference to 0.2.1 which support stream response | 2023-08-23 20:15:10 +08:00
Matri | 916d8be0ae | fix: activation page reload issue after activating (#964) | 2023-08-23 13:54:40 +08:00
crazywoola | a38412de7b | update doc (#965) | 2023-08-23 12:29:52 +08:00
Matri | 9c9f0ddb93 | fix: user activation request 404 issue (#963) | 2023-08-23 08:57:25 +08:00
takatost | f8fbe96da4 | feat: bump version to 0.3.15 (#959) | 2023-08-22 18:20:33 +08:00
zxhlyh | 215a27fd95 | Feat/add xinference openllm provider (#958) | 2023-08-22 18:19:10 +08:00
takatost | 5cba2e7087 | fix: web reader tool retrieve content empty (#957) | 2023-08-22 18:01:16 +08:00
Jyong | 5623839c71 | update document segment (#950) (Co-authored-by: jyong <jyong@dify.ai>) | 2023-08-22 17:59:24 +08:00
takatost | 78d3aa5fcd | fix: embedding init err (#956) | 2023-08-22 17:43:59 +08:00
zxhlyh | a7c78d2cd2 | fix: spark provider field name (#955) | 2023-08-22 17:28:18 +08:00
Joel | 4db35fa375 | chore: obsolete info api use new api (#954) | 2023-08-22 16:59:57 +08:00
Joel | e67a1413b6 | chore: create btn to first place (#953) | 2023-08-22 16:20:56 +08:00
takatost | 4f3053a8cc | fix: xinference chat completion error (#952) | 2023-08-22 15:58:04 +08:00
zxhlyh | b3c2bf125f | Feat/model providers (#951) | 2023-08-22 15:38:12 +08:00
zxhlyh | 9d5299e9ec | fix: segment error tip & save segment disable when loading (#949) | 2023-08-22 15:22:16 +08:00
Jyong | aee15adf1b | update document segment (#948) (Co-authored-by: jyong <jyong@dify.ai>) | 2023-08-22 15:19:09 +08:00
zxhlyh | b185a70c21 | Fix/speech to text button (#947) | 2023-08-22 14:55:20 +08:00
takatost | a3aba7a9aa | fix: provider model not delete when reset key pair (#946) | 2023-08-22 13:48:58 +08:00
takatost | 866ee5da91 | fix: openllm generate cutoff (#945) | 2023-08-22 13:43:36 +08:00
Matri | e8039a7da8 | fix: add flex-wrap to categories container (#944) | 2023-08-22 13:39:52 +08:00
bowen | 5e0540077a | chore: perfect type definition (#940) | 2023-08-22 10:58:06 +08:00
Matri | b346bd9b83 | fix: default language improvement in activation page (#942) | 2023-08-22 09:28:37 +08:00
Matri | 062e2e915b | fix: login improvement (#941) | 2023-08-21 21:26:32 +08:00
takatost | e0a48c4972 | fix: xinference chat support (#939) | 2023-08-21 20:44:29 +08:00
zxhlyh | f53242c081 | Feat/add document status tooltip (#937) | 2023-08-21 18:07:51 +08:00
Jyong | 4b53bb1a32 | Feat/token support (#909) (Co-authored-by: StyleZhang <jasonapring2015@outlook.com>, jyong <jyong@dify.ai>) | 2023-08-21 13:57:18 +08:00
takatost | 4c49ecedb5 | feat: optimize web reader summary in 3.5 (#933) | 2023-08-21 11:58:01 +08:00
takatost | 4ff1870a4b | fix: web reader tool missing nodejs (#932) | 2023-08-21 11:26:11 +08:00
takatost | 6c832ee328 | fix: remove openllm pypi package because of this package too large (#931) | 2023-08-21 02:12:28 +08:00
takatost | 25264e7852 | feat: add xinference embedding model support (#930) | 2023-08-20 19:35:07 +08:00
takatost | 18dd0d569d | fix: xinference max_tokens alisa error (#929) | 2023-08-20 19:12:52 +08:00
takatost | 3ea8d7a019 | feat: add openllm support (#928) | 2023-08-20 19:04:33 +08:00
takatost | da3f10a55e | feat: server xinference support (#927) | 2023-08-20 17:46:41 +08:00
Benjamin | 8c991b5b26 | Fix Readme.md typo error. (#926) | 2023-08-20 12:02:04 +08:00
takatost | 22c1aafb9b | fix: document paused at format error (#925) | 2023-08-20 01:54:12 +08:00
takatost | 8d6d1c442b | feat: optimize generate name length (#924) | 2023-08-19 23:34:38 +08:00
140 changed files with 2919 additions and 560 deletions

View File

@@ -30,7 +30,7 @@ Visual data analysis, log review, and annotation for applications
- [x] **Spark**
- [x] **Wenxin**
- [x] **Tongyi**
- [x] **ChatGLM**
We provide the following free resources for registered Dify cloud users (sign up at [dify.ai](https://dify.ai)):
* 600,000 free Claude model tokens to build Claude-powered apps

View File

@@ -16,7 +16,7 @@ EXPOSE 5001
WORKDIR /app/api
RUN apt-get update && \
apt-get install -y bash curl wget vim gcc g++ python3-dev libc-dev libffi-dev
apt-get install -y bash curl wget vim gcc g++ python3-dev libc-dev libffi-dev nodejs
COPY requirements.txt /app/api/requirements.txt

View File

@@ -20,7 +20,7 @@ from models.model import Account
import secrets
import base64
from models.provider import Provider, ProviderType, ProviderQuotaType
from models.provider import Provider, ProviderType, ProviderQuotaType, ProviderModel
@click.command('reset-password', help='Reset the account password.')
@@ -102,6 +102,7 @@ def reset_encrypt_key_pair():
tenant.encrypt_public_key = generate_key_pair(tenant.id)
db.session.query(Provider).filter(Provider.provider_type == 'custom').delete()
db.session.query(ProviderModel).delete()
db.session.commit()
click.echo(click.style('Congratulations! '

View File

@@ -100,7 +100,7 @@ class Config:
self.CONSOLE_URL = get_env('CONSOLE_URL')
self.API_URL = get_env('API_URL')
self.APP_URL = get_env('APP_URL')
self.CURRENT_VERSION = "0.3.14"
self.CURRENT_VERSION = "0.3.15"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = "SELF_HOSTED"
self.DEPLOY_ENV = get_env('DEPLOY_ENV')

View File

@@ -1,4 +1,5 @@
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
import flask_restful
from flask_restful import Resource, fields, marshal_with
from werkzeug.exceptions import Forbidden

View File

@@ -3,7 +3,9 @@ import json
import logging
from datetime import datetime
from flask_login import login_required, current_user
import flask
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal_with, abort, inputs
from werkzeug.exceptions import Forbidden
@@ -316,7 +318,7 @@ class AppApi(Resource):
if current_user.current_tenant.current_role not in ['admin', 'owner']:
raise Forbidden()
app = _get_app(app_id, current_user.current_tenant_id)
db.session.delete(app)

View File

@@ -2,7 +2,7 @@
import logging
from flask import request
from flask_login import login_required
from core.login.login import login_required
from werkzeug.exceptions import InternalServerError, NotFound
import services

View File

@@ -5,7 +5,7 @@ from typing import Generator, Union
import flask_login
from flask import Response, stream_with_context
from flask_login import login_required
from core.login.login import login_required
from werkzeug.exceptions import InternalServerError, NotFound
import services

View File

@@ -1,7 +1,8 @@
from datetime import datetime
import pytz
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal_with
from flask_restful.inputs import int_range
from sqlalchemy import or_, func

View File

@@ -1,4 +1,5 @@
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse
from controllers.console import api

View File

@@ -3,7 +3,7 @@ import logging
from typing import Union, Generator
from flask import Response, stream_with_context
from flask_login import current_user, login_required
from flask_login import current_user
from flask_restful import Resource, reqparse, marshal_with, fields
from flask_restful.inputs import int_range
from werkzeug.exceptions import InternalServerError, NotFound
@@ -16,6 +16,7 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.model_providers.error import LLMRateLimitError, LLMBadRequestError, LLMAuthorizationError, LLMAPIConnectionError, \
ProviderTokenNotInitError, LLMAPIUnavailableError, QuotaExceededError, ModelCurrentlyNotSupportError
from core.login.login import login_required
from libs.helper import uuid_value, TimestampField
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from extensions.ext_database import db

View File

@@ -3,12 +3,13 @@ import json
from flask import request
from flask_restful import Resource
from flask_login import login_required, current_user
from flask_login import current_user
from controllers.console import api
from controllers.console.app import _get_app
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.login.login import login_required
from events.app_event import app_model_config_was_updated
from extensions.ext_database import db
from models.model import AppModelConfig

View File

@@ -1,5 +1,6 @@
# -*- coding:utf-8 -*-
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal_with
from werkzeug.exceptions import NotFound, Forbidden

View File

@@ -4,7 +4,8 @@ from datetime import datetime
import pytz
from flask import jsonify
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse
from controllers.console import api

View File

@@ -5,9 +5,12 @@ from typing import Optional
import flask_login
import requests
from flask import request, redirect, current_app, session
from flask_login import current_user, login_required
from flask_login import current_user
from flask_restful import Resource
from werkzeug.exceptions import Forbidden
from core.login.login import login_required
from libs.oauth_data_source import NotionOAuth
from controllers.console import api
from ..setup import setup_required

View File

@@ -3,7 +3,8 @@ import json
from cachetools import TTLCache
from flask import request, current_app
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, marshal_with, fields, reqparse, marshal
from werkzeug.exceptions import NotFound

View File

@@ -1,6 +1,7 @@
# -*- coding:utf-8 -*-
from flask import request
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal, marshal_with
from werkzeug.exceptions import NotFound, Forbidden
import services

View File

@@ -4,7 +4,8 @@ from datetime import datetime
from typing import List
from flask import request
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import desc, asc
from werkzeug.exceptions import NotFound, Forbidden
@@ -764,11 +765,13 @@ class DocumentMetadataApi(DocumentResource):
metadata_schema = DocumentService.DOCUMENT_METADATA_SCHEMA[doc_type]
document.doc_metadata = {}
for key, value_type in metadata_schema.items():
value = doc_metadata.get(key)
if value is not None and isinstance(value, value_type):
document.doc_metadata[key] = value
if doc_type == 'others':
document.doc_metadata = doc_metadata
else:
for key, value_type in metadata_schema.items():
value = doc_metadata.get(key)
if value is not None and isinstance(value, value_type):
document.doc_metadata[key] = value
document.doc_type = doc_type
document.updated_at = datetime.utcnow()
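
With the new branch, a doc_type of 'others' stores the submitted metadata verbatim instead of filtering it through a typed schema. An illustrative payload (field names are hypothetical; only the 'others' behavior is taken from the diff above):

# Hypothetical metadata for doc_type 'others': stored on document.doc_metadata
# verbatim, while schema-typed doc types are still filtered key by key as before.
doc_metadata = {'source_system': 'internal-wiki', 'owner': 'ops-team'}
doc_type = 'others'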

View File

@@ -1,9 +1,8 @@
# -*- coding:utf-8 -*-
import uuid
from datetime import datetime
from flask import request
from flask_login import login_required, current_user
from flask_login import current_user
from flask_restful import Resource, reqparse, fields, marshal
from werkzeug.exceptions import NotFound, Forbidden
@@ -15,6 +14,7 @@ from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.model_providers.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_providers.model_factory import ModelFactory
from core.login.login import login_required
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DocumentSegment

View File

@@ -8,7 +8,8 @@ from pathlib import Path
from cachetools import TTLCache
from flask import request, current_app
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, marshal_with, fields
from werkzeug.exceptions import NotFound

View File

@@ -1,6 +1,7 @@
import logging
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, marshal, fields
from werkzeug.exceptions import InternalServerError, NotFound, Forbidden

View File

@@ -1,7 +1,8 @@
# -*- coding:utf-8 -*-
from datetime import datetime
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal_with, inputs
from sqlalchemy import and_
from werkzeug.exceptions import NotFound, Forbidden, BadRequest

View File

@@ -1,5 +1,6 @@
# -*- coding:utf-8 -*-
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, fields, marshal_with
from sqlalchemy import and_

View File

@@ -1,4 +1,5 @@
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource
from functools import wraps

View File

@@ -1,7 +1,8 @@
import json
from functools import wraps
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required

View File

@@ -3,7 +3,8 @@ from datetime import datetime
import pytz
from flask import current_app, request
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, fields, marshal_with
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError

View File

@@ -1,6 +1,7 @@
# -*- coding:utf-8 -*-
from flask import current_app
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse, marshal_with, abort, fields, marshal
import services

View File

@@ -1,4 +1,5 @@
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse
from werkzeug.exceptions import Forbidden

View File

@@ -1,4 +1,5 @@
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse
from controllers.console import api

View File

@@ -1,5 +1,6 @@
# -*- coding:utf-8 -*-
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, reqparse
from werkzeug.exceptions import Forbidden

View File

@@ -1,6 +1,7 @@
import json
from flask_login import login_required, current_user
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, abort, reqparse
from werkzeug.exceptions import Forbidden

View File

@@ -2,10 +2,13 @@
import logging
from flask import request
from flask_login import login_required, current_user
from flask_restful import Resource, fields, marshal_with, reqparse, marshal
from flask_login import current_user
from core.login.login import login_required
from flask_restful import Resource, fields, marshal_with, reqparse, marshal, inputs
from flask_restful.inputs import int_range
from controllers.console import api
from controllers.console.admin import admin_required
from controllers.console.setup import setup_required
from controllers.console.error import AccountNotLinkTenantError
from controllers.console.wraps import account_initialization_required
@@ -43,6 +46,13 @@ tenants_fields = {
'current': fields.Boolean
}
workspace_fields = {
'id': fields.String,
'name': fields.String,
'status': fields.String,
'created_at': TimestampField
}
class TenantListApi(Resource):
@setup_required
@@ -57,6 +67,38 @@ class TenantListApi(Resource):
return {'workspaces': marshal(tenants, tenants_fields)}, 200
class WorkspaceListApi(Resource):
@setup_required
@admin_required
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('page', type=inputs.int_range(1, 99999), required=False, default=1, location='args')
parser.add_argument('limit', type=inputs.int_range(1, 100), required=False, default=20, location='args')
args = parser.parse_args()
tenants = db.session.query(Tenant).order_by(Tenant.created_at.desc())\
.paginate(page=args['page'], per_page=args['limit'])
has_more = False
if len(tenants.items) == args['limit']:
current_page_first_tenant = tenants[-1]
rest_count = db.session.query(Tenant).filter(
Tenant.created_at < current_page_first_tenant.created_at,
Tenant.id != current_page_first_tenant.id
).count()
if rest_count > 0:
has_more = True
total = db.session.query(Tenant).count()
return {
'data': marshal(tenants.items, workspace_fields),
'has_more': has_more,
'limit': args['limit'],
'page': args['page'],
'total': total
}, 200
class TenantApi(Resource):
@setup_required
@login_required
@@ -92,6 +134,7 @@ class SwitchWorkspaceApi(Resource):
api.add_resource(TenantListApi, '/workspaces') # GET for getting all tenants
api.add_resource(WorkspaceListApi, '/all-workspaces') # GET for getting all tenants
api.add_resource(TenantApi, '/workspaces/current', endpoint='workspaces_current') # GET for getting current tenant info
api.add_resource(TenantApi, '/info', endpoint='info') # Deprecated
api.add_resource(SwitchWorkspaceApi, '/workspaces/switch') # POST for switching tenant
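
For reference, a minimal sketch of calling the new admin-only listing endpoint. The route, pagination arguments, and response fields come from WorkspaceListApi above; the base URL and the bearer-token form of the admin key are assumptions:

import requests

BASE_URL = 'http://localhost:5001/console/api'  # assumed console API base

resp = requests.get(
    f'{BASE_URL}/all-workspaces',
    params={'page': 1, 'limit': 20},                      # validated by int_range above
    headers={'Authorization': 'Bearer <ADMIN_API_KEY>'},  # assumed @admin_required format
)
body = resp.json()
# Fields returned by WorkspaceListApi.get: data, has_more, limit, page, total
for ws in body['data']:
    print(ws['id'], ws['name'], ws['status'], ws['created_at'])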

View File

@@ -103,6 +103,7 @@ class DatesetDocumentStore:
content=doc.page_content,
word_count=len(doc.page_content),
tokens=tokens,
enabled=False,
created_by=self._user_id,
)
if 'answer' in doc.metadata and doc.metadata['answer']:

View File

@@ -691,6 +691,7 @@ class IndexingRunner:
DocumentSegment.status == "indexing"
).update({
DocumentSegment.status: "completed",
DocumentSegment.enabled: True,
DocumentSegment.completed_at: datetime.datetime.utcnow()
})

api/core/login/login.py (new file, 108 lines)
View File

@@ -0,0 +1,108 @@
import os
from functools import wraps
import flask_login
from flask import current_app
from flask import g
from flask import has_request_context
from flask import request
from flask_login import user_logged_in
from flask_login.config import EXEMPT_METHODS
from werkzeug.exceptions import Unauthorized
from werkzeug.local import LocalProxy
from extensions.ext_database import db
from models.account import Account, Tenant, TenantAccountJoin
#: A proxy for the current user. If no user is logged in, this will be an
#: anonymous user
current_user = LocalProxy(lambda: _get_user())
def login_required(func):
"""
If you decorate a view with this, it will ensure that the current user is
logged in and authenticated before calling the actual view. (If they are
not, it calls the :attr:`LoginManager.unauthorized` callback.) For
example::
@app.route('/post')
@login_required
def post():
pass
If there are only certain times you need to require that your user is
logged in, you can do so with::
if not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
...which is essentially the code that this function adds to your views.
It can be convenient to globally turn off authentication when unit testing.
To enable this, if the application configuration variable `LOGIN_DISABLED`
is set to `True`, this decorator will be ignored.
.. Note ::
Per `W3 guidelines for CORS preflight requests
<http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,
HTTP ``OPTIONS`` requests are exempt from login checks.
:param func: The view function to decorate.
:type func: function
"""
@wraps(func)
def decorated_view(*args, **kwargs):
auth_header = request.headers.get('Authorization')
admin_api_key_enable = os.getenv('ADMIN_API_KEY_ENABLE', default='False')
if admin_api_key_enable:
if auth_header:
if ' ' not in auth_header:
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != 'bearer':
raise Unauthorized('Invalid Authorization header format. Expected \'Bearer <api-key>\' format.')
admin_api_key = os.getenv('ADMIN_API_KEY')
if admin_api_key:
if os.getenv('ADMIN_API_KEY') == auth_token:
workspace_id = request.headers.get('X-WORKSPACE-ID')
if workspace_id:
tenant_account_join = db.session.query(Tenant, TenantAccountJoin) \
.filter(Tenant.id == workspace_id) \
.filter(TenantAccountJoin.tenant_id == Tenant.id) \
.filter(TenantAccountJoin.role == 'owner') \
.one_or_none()
if tenant_account_join:
tenant, ta = tenant_account_join
account = Account.query.filter_by(id=ta.account_id).first()
# Login admin
if account:
account.current_tenant = tenant
current_app.login_manager._update_request_context_with_user(account)
user_logged_in.send(current_app._get_current_object(), user=_get_user())
if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"):
pass
elif not current_user.is_authenticated:
return current_app.login_manager.unauthorized()
# flask 1.x compatibility
# current_app.ensure_sync is only available in Flask >= 2.0
if callable(getattr(current_app, "ensure_sync", None)):
return current_app.ensure_sync(func)(*args, **kwargs)
return func(*args, **kwargs)
return decorated_view
def _get_user():
if has_request_context():
if "_login_user" not in g:
current_app.login_manager._load_user()
return g._login_user
return None
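
The new decorator is a drop-in replacement for flask_login's login_required, which is why the controller diffs in this compare only swap the import. A minimal sketch with a hypothetical resource:

from flask_restful import Resource
from flask_login import current_user          # still imported from flask_login
from core.login.login import login_required   # new decorator from this compare

class PingApi(Resource):
    @login_required
    def get(self):
        # Reached via a normal session login, or via the admin path above:
        # Authorization: Bearer <ADMIN_API_KEY> plus X-WORKSPACE-ID: <tenant-id>,
        # with ADMIN_API_KEY_ENABLE and ADMIN_API_KEY set in the environment.
        return {'account_id': str(current_user.id)}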

View File

@@ -57,6 +57,12 @@ class ModelProviderFactory:
elif provider_name == 'huggingface_hub':
from core.model_providers.providers.huggingface_hub_provider import HuggingfaceHubProvider
return HuggingfaceHubProvider
elif provider_name == 'xinference':
from core.model_providers.providers.xinference_provider import XinferenceProvider
return XinferenceProvider
elif provider_name == 'openllm':
from core.model_providers.providers.openllm_provider import OpenLLMProvider
return OpenLLMProvider
else:
raise NotImplementedError

View File

@@ -0,0 +1,27 @@
from core.third_party.langchain.embeddings.xinference_embedding import XinferenceEmbedding as XinferenceEmbeddings
from replicate.exceptions import ModelError, ReplicateError
from core.model_providers.error import LLMBadRequestError
from core.model_providers.providers.base import BaseModelProvider
from core.model_providers.models.embedding.base import BaseEmbedding
class XinferenceEmbedding(BaseEmbedding):
def __init__(self, model_provider: BaseModelProvider, name: str):
credentials = model_provider.get_model_credentials(
model_name=name,
model_type=self.type
)
client = XinferenceEmbeddings(
server_url=credentials['server_url'],
model_uid=credentials['model_uid'],
)
super().__init__(model_provider, client, name)
def handle_exceptions(self, ex: Exception) -> Exception:
if isinstance(ex, (ModelError, ReplicateError)):
return LLMBadRequestError(f"Xinference embedding: {str(ex)}")
else:
return ex

View File

@@ -0,0 +1,60 @@
from typing import List, Optional, Any
from langchain.callbacks.manager import Callbacks
from langchain.schema import LLMResult
from core.model_providers.error import LLMBadRequestError
from core.model_providers.models.llm.base import BaseLLM
from core.model_providers.models.entity.message import PromptMessage
from core.model_providers.models.entity.model_params import ModelMode, ModelKwargs
from core.third_party.langchain.llms.openllm import OpenLLM
class OpenLLMModel(BaseLLM):
model_mode: ModelMode = ModelMode.COMPLETION
def _init_client(self) -> Any:
self.provider_model_kwargs = self._to_model_kwargs_input(self.model_rules, self.model_kwargs)
client = OpenLLM(
server_url=self.credentials.get('server_url'),
callbacks=self.callbacks,
llm_kwargs=self.provider_model_kwargs
)
return client
def _run(self, messages: List[PromptMessage],
stop: Optional[List[str]] = None,
callbacks: Callbacks = None,
**kwargs) -> LLMResult:
"""
run predict by prompt messages and stop words.
:param messages:
:param stop:
:param callbacks:
:return:
"""
prompts = self._get_prompt_from_messages(messages)
return self._client.generate([prompts], stop, callbacks)
def get_num_tokens(self, messages: List[PromptMessage]) -> int:
"""
get num tokens of prompt messages.
:param messages:
:return:
"""
prompts = self._get_prompt_from_messages(messages)
return max(self._client.get_num_tokens(prompts), 0)
def _set_model_kwargs(self, model_kwargs: ModelKwargs):
pass
def handle_exceptions(self, ex: Exception) -> Exception:
return LLMBadRequestError(f"OpenLLM: {str(ex)}")
@classmethod
def support_streaming(cls):
return False

View File

@@ -0,0 +1,70 @@
from typing import List, Optional, Any
from langchain.callbacks.manager import Callbacks
from langchain.schema import LLMResult
from core.model_providers.error import LLMBadRequestError
from core.model_providers.models.llm.base import BaseLLM
from core.model_providers.models.entity.message import PromptMessage
from core.model_providers.models.entity.model_params import ModelMode, ModelKwargs
from core.third_party.langchain.llms.xinference_llm import XinferenceLLM
class XinferenceModel(BaseLLM):
model_mode: ModelMode = ModelMode.COMPLETION
def _init_client(self) -> Any:
self.provider_model_kwargs = self._to_model_kwargs_input(self.model_rules, self.model_kwargs)
client = XinferenceLLM(
server_url=self.credentials['server_url'],
model_uid=self.credentials['model_uid'],
)
client.callbacks = self.callbacks
return client
def _run(self, messages: List[PromptMessage],
stop: Optional[List[str]] = None,
callbacks: Callbacks = None,
**kwargs) -> LLMResult:
"""
run predict by prompt messages and stop words.
:param messages:
:param stop:
:param callbacks:
:return:
"""
prompts = self._get_prompt_from_messages(messages)
return self._client.generate(
[prompts],
stop,
callbacks,
generate_config={
"stop": stop,
"stream": self.streaming,
**self.provider_model_kwargs,
}
)
def get_num_tokens(self, messages: List[PromptMessage]) -> int:
"""
get num tokens of prompt messages.
:param messages:
:return:
"""
prompts = self._get_prompt_from_messages(messages)
return max(self._client.get_num_tokens(prompts), 0)
def _set_model_kwargs(self, model_kwargs: ModelKwargs):
pass
def handle_exceptions(self, ex: Exception) -> Exception:
return LLMBadRequestError(f"Xinference: {str(ex)}")
@classmethod
def support_streaming(cls):
return True

View File

@@ -0,0 +1,138 @@
import json
from typing import Type
from core.helper import encrypter
from core.model_providers.models.entity.model_params import KwargRule, ModelKwargsRules, ModelType
from core.model_providers.models.llm.openllm_model import OpenLLMModel
from core.model_providers.providers.base import BaseModelProvider, CredentialsValidateFailedError
from core.model_providers.models.base import BaseProviderModel
from core.third_party.langchain.llms.openllm import OpenLLM
from models.provider import ProviderType
class OpenLLMProvider(BaseModelProvider):
@property
def provider_name(self):
"""
Returns the name of a provider.
"""
return 'openllm'
def _get_fixed_model_list(self, model_type: ModelType) -> list[dict]:
return []
def get_model_class(self, model_type: ModelType) -> Type[BaseProviderModel]:
"""
Returns the model class.
:param model_type:
:return:
"""
if model_type == ModelType.TEXT_GENERATION:
model_class = OpenLLMModel
else:
raise NotImplementedError
return model_class
def get_model_parameter_rules(self, model_name: str, model_type: ModelType) -> ModelKwargsRules:
"""
get model parameter rules.
:param model_name:
:param model_type:
:return:
"""
return ModelKwargsRules(
temperature=KwargRule[float](min=0.01, max=2, default=1),
top_p=KwargRule[float](min=0, max=1, default=0.7),
presence_penalty=KwargRule[float](min=-2, max=2, default=0),
frequency_penalty=KwargRule[float](min=-2, max=2, default=0),
max_tokens=KwargRule[int](alias='max_new_tokens', min=10, max=4000, default=128),
)
@classmethod
def is_model_credentials_valid_or_raise(cls, model_name: str, model_type: ModelType, credentials: dict):
"""
check model credentials valid.
:param model_name:
:param model_type:
:param credentials:
"""
if 'server_url' not in credentials:
raise CredentialsValidateFailedError('OpenLLM Server URL must be provided.')
try:
credential_kwargs = {
'server_url': credentials['server_url']
}
llm = OpenLLM(
llm_kwargs={
'max_new_tokens': 10
},
**credential_kwargs
)
llm("ping")
except Exception as ex:
raise CredentialsValidateFailedError(str(ex))
@classmethod
def encrypt_model_credentials(cls, tenant_id: str, model_name: str, model_type: ModelType,
credentials: dict) -> dict:
"""
encrypt model credentials for save.
:param tenant_id:
:param model_name:
:param model_type:
:param credentials:
:return:
"""
credentials['server_url'] = encrypter.encrypt_token(tenant_id, credentials['server_url'])
return credentials
def get_model_credentials(self, model_name: str, model_type: ModelType, obfuscated: bool = False) -> dict:
"""
get credentials for llm use.
:param model_name:
:param model_type:
:param obfuscated:
:return:
"""
if self.provider.provider_type != ProviderType.CUSTOM.value:
raise NotImplementedError
provider_model = self._get_provider_model(model_name, model_type)
if not provider_model.encrypted_config:
return {
'server_url': None
}
credentials = json.loads(provider_model.encrypted_config)
if credentials['server_url']:
credentials['server_url'] = encrypter.decrypt_token(
self.provider.tenant_id,
credentials['server_url']
)
if obfuscated:
credentials['server_url'] = encrypter.obfuscated_token(credentials['server_url'])
return credentials
@classmethod
def is_provider_credentials_valid_or_raise(cls, credentials: dict):
return
@classmethod
def encrypt_provider_credentials(cls, tenant_id: str, credentials: dict) -> dict:
return {}
def get_provider_credentials(self, obfuscated: bool = False) -> dict:
return {}
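
The provider persists only an encrypted server_url. A sketch of the save/load round trip using the encrypter helpers referenced above; the tenant id and URL are placeholders, and this assumes a running Dify app context for the key material:

from core.helper import encrypter

token = encrypter.encrypt_token('tenant_id', 'http://127.0.0.1:3333/')
server_url = encrypter.decrypt_token('tenant_id', token)  # stored form -> plain URL
masked = encrypter.obfuscated_token(server_url)           # returned when obfuscated=True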

View File

@@ -0,0 +1,193 @@
import json
from typing import Type
import requests
from xinference.client import RESTfulGenerateModelHandle, RESTfulChatModelHandle, RESTfulChatglmCppChatModelHandle
from core.helper import encrypter
from core.model_providers.models.embedding.xinference_embedding import XinferenceEmbedding
from core.model_providers.models.entity.model_params import KwargRule, ModelKwargsRules, ModelType
from core.model_providers.models.llm.xinference_model import XinferenceModel
from core.model_providers.providers.base import BaseModelProvider, CredentialsValidateFailedError
from core.model_providers.models.base import BaseProviderModel
from core.third_party.langchain.llms.xinference_llm import XinferenceLLM
from models.provider import ProviderType
class XinferenceProvider(BaseModelProvider):
@property
def provider_name(self):
"""
Returns the name of a provider.
"""
return 'xinference'
def _get_fixed_model_list(self, model_type: ModelType) -> list[dict]:
return []
def get_model_class(self, model_type: ModelType) -> Type[BaseProviderModel]:
"""
Returns the model class.
:param model_type:
:return:
"""
if model_type == ModelType.TEXT_GENERATION:
model_class = XinferenceModel
elif model_type == ModelType.EMBEDDINGS:
model_class = XinferenceEmbedding
else:
raise NotImplementedError
return model_class
def get_model_parameter_rules(self, model_name: str, model_type: ModelType) -> ModelKwargsRules:
"""
get model parameter rules.
:param model_name:
:param model_type:
:return:
"""
credentials = self.get_model_credentials(model_name, model_type)
if credentials['model_format'] == "ggmlv3" and credentials["model_handle_type"] == "chatglm":
return ModelKwargsRules(
temperature=KwargRule[float](min=0.01, max=2, default=1),
top_p=KwargRule[float](min=0, max=1, default=0.7),
presence_penalty=KwargRule[float](enabled=False),
frequency_penalty=KwargRule[float](enabled=False),
max_tokens=KwargRule[int](min=10, max=4000, default=256),
)
elif credentials['model_format'] == "ggmlv3":
return ModelKwargsRules(
temperature=KwargRule[float](min=0.01, max=2, default=1),
top_p=KwargRule[float](min=0, max=1, default=0.7),
presence_penalty=KwargRule[float](min=-2, max=2, default=0),
frequency_penalty=KwargRule[float](min=-2, max=2, default=0),
max_tokens=KwargRule[int](min=10, max=4000, default=256),
)
else:
return ModelKwargsRules(
temperature=KwargRule[float](min=0.01, max=2, default=1),
top_p=KwargRule[float](min=0, max=1, default=0.7),
presence_penalty=KwargRule[float](enabled=False),
frequency_penalty=KwargRule[float](enabled=False),
max_tokens=KwargRule[int](alias='max_new_tokens', min=10, max=4000, default=256),
)
@classmethod
def is_model_credentials_valid_or_raise(cls, model_name: str, model_type: ModelType, credentials: dict):
"""
check model credentials valid.
:param model_name:
:param model_type:
:param credentials:
"""
if 'server_url' not in credentials:
raise CredentialsValidateFailedError('Xinference Server URL must be provided.')
if 'model_uid' not in credentials:
raise CredentialsValidateFailedError('Xinference Model UID must be provided.')
try:
credential_kwargs = {
'server_url': credentials['server_url'],
'model_uid': credentials['model_uid'],
}
llm = XinferenceLLM(
**credential_kwargs
)
llm("ping")
except Exception as ex:
raise CredentialsValidateFailedError(str(ex))
@classmethod
def encrypt_model_credentials(cls, tenant_id: str, model_name: str, model_type: ModelType,
credentials: dict) -> dict:
"""
encrypt model credentials for save.
:param tenant_id:
:param model_name:
:param model_type:
:param credentials:
:return:
"""
extra_credentials = cls._get_extra_credentials(credentials)
credentials.update(extra_credentials)
credentials['server_url'] = encrypter.encrypt_token(tenant_id, credentials['server_url'])
return credentials
def get_model_credentials(self, model_name: str, model_type: ModelType, obfuscated: bool = False) -> dict:
"""
get credentials for llm use.
:param model_name:
:param model_type:
:param obfuscated:
:return:
"""
if self.provider.provider_type != ProviderType.CUSTOM.value:
raise NotImplementedError
provider_model = self._get_provider_model(model_name, model_type)
if not provider_model.encrypted_config:
return {
'server_url': None,
'model_uid': None,
}
credentials = json.loads(provider_model.encrypted_config)
if credentials['server_url']:
credentials['server_url'] = encrypter.decrypt_token(
self.provider.tenant_id,
credentials['server_url']
)
if obfuscated:
credentials['server_url'] = encrypter.obfuscated_token(credentials['server_url'])
return credentials
@classmethod
def _get_extra_credentials(self, credentials: dict) -> dict:
url = f"{credentials['server_url']}/v1/models/{credentials['model_uid']}"
response = requests.get(url)
if response.status_code != 200:
raise RuntimeError(
f"Failed to get the model description, detail: {response.json()['detail']}"
)
desc = response.json()
extra_credentials = {
'model_format': desc['model_format'],
}
if desc["model_format"] == "ggmlv3" and "chatglm" in desc["model_name"]:
extra_credentials['model_handle_type'] = 'chatglm'
elif "generate" in desc["model_ability"]:
extra_credentials['model_handle_type'] = 'generate'
elif "chat" in desc["model_ability"]:
extra_credentials['model_handle_type'] = 'chat'
else:
raise NotImplementedError(f"Model handle type not supported.")
return extra_credentials
@classmethod
def is_provider_credentials_valid_or_raise(cls, credentials: dict):
return
@classmethod
def encrypt_provider_credentials(cls, tenant_id: str, credentials: dict) -> dict:
return {}
def get_provider_credentials(self, obfuscated: bool = False) -> dict:
return {}
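
For context, a hedged sketch of the model description that _get_extra_credentials fetches from GET {server_url}/v1/models/{model_uid}; the concrete values are illustrative, and only the model_format, model_name, and model_ability keys are relied on above:

# Illustrative response body (values assumed for a llama-2 chat model).
desc = {
    'model_name': 'llama-2-chat',
    'model_format': 'ggmlv3',
    'model_ability': ['generate', 'chat'],
}

# Mirrors the branching in _get_extra_credentials: the name is not a chatglm
# name, and 'generate' is checked first, so the handle type is 'generate'.
if desc['model_format'] == 'ggmlv3' and 'chatglm' in desc['model_name']:
    handle_type = 'chatglm'
elif 'generate' in desc['model_ability']:
    handle_type = 'generate'
elif 'chat' in desc['model_ability']:
    handle_type = 'chat'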

View File

@@ -8,5 +8,7 @@
"wenxin",
"chatglm",
"replicate",
"huggingface_hub"
"huggingface_hub",
"xinference",
"openllm"
]

View File

@@ -0,0 +1,7 @@
{
"support_provider_types": [
"custom"
],
"system_config": null,
"model_flexibility": "configurable"
}

View File

@@ -0,0 +1,7 @@
{
"support_provider_types": [
"custom"
],
"system_config": null,
"model_flexibility": "configurable"
}

View File

@@ -0,0 +1,21 @@
from typing import List
import numpy as np
from langchain.embeddings import XinferenceEmbeddings
class XinferenceEmbedding(XinferenceEmbeddings):
def embed_documents(self, texts: List[str]) -> List[List[float]]:
vectors = super().embed_documents(texts)
normalized_vectors = [(vector / np.linalg.norm(vector)).tolist() for vector in vectors]
return normalized_vectors
def embed_query(self, text: str) -> List[float]:
vector = super().embed_query(text)
normalized_vector = (vector / np.linalg.norm(vector)).tolist()
return normalized_vector
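
The subclass L2-normalizes whatever the server returns, so cosine-similarity and dot-product scoring agree downstream. A small self-contained check of that invariant, using synthetic vectors rather than a live Xinference call:

import numpy as np

# Synthetic vectors standing in for raw Xinference embeddings.
vectors = [np.array([3.0, 4.0]), np.array([1.0, 2.0, 2.0])]
normalized = [(v / np.linalg.norm(v)).tolist() for v in vectors]

for v in normalized:
    assert abs(np.linalg.norm(v) - 1.0) < 1e-9  # unit length after normalization
print(normalized[0])  # [0.6, 0.8]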

View File

@@ -0,0 +1,84 @@
from __future__ import annotations
import logging
from typing import (
Any,
Dict,
List,
Optional,
)
import requests
from langchain.llms.utils import enforce_stop_tokens
from pydantic import Field
from langchain.callbacks.manager import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain.llms.base import LLM
logger = logging.getLogger(__name__)
class OpenLLM(LLM):
"""OpenLLM, supporting both in-process model
instance and remote OpenLLM servers.
If you have a OpenLLM server running, you can also use it remotely:
.. code-block:: python
from langchain.llms import OpenLLM
llm = OpenLLM(server_url='http://localhost:3000')
llm("What is the difference between a duck and a goose?")
"""
server_url: Optional[str] = None
"""Optional server URL that currently runs a LLMServer with 'openllm start'."""
llm_kwargs: Dict[str, Any] = Field(default_factory=dict)
"""Key word arguments to be passed to openllm.LLM"""
@property
def _llm_type(self) -> str:
return "openllm"
def _call(
self,
prompt: str,
stop: Optional[List[str]] = None,
run_manager: CallbackManagerForLLMRun | None = None,
**kwargs: Any,
) -> str:
params = {
"prompt": prompt,
"llm_config": self.llm_kwargs
}
headers = {"Content-Type": "application/json"}
response = requests.post(
f'{self.server_url}/v1/generate',
headers=headers,
json=params
)
if not response.ok:
raise ValueError(f"OpenLLM HTTP {response.status_code} error: {response.text}")
json_response = response.json()
completion = json_response["responses"][0]
if stop is not None:
completion = enforce_stop_tokens(completion, stop)
return completion
async def _acall(
self,
prompt: str,
stop: Optional[List[str]] = None,
run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> str:
raise NotImplementedError(
"Async call is not supported for OpenLLM at the moment."
)
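
A sketch of the raw HTTP exchange _call performs, assuming an OpenLLM server at localhost:3000; the /v1/generate route, payload keys, and the responses field are taken from the code above:

import requests

payload = {
    'prompt': 'What is the difference between a duck and a goose?',
    'llm_config': {'max_new_tokens': 64},  # forwarded from llm_kwargs above
}
resp = requests.post('http://localhost:3000/v1/generate',
                     headers={'Content-Type': 'application/json'},
                     json=payload)
resp.raise_for_status()
print(resp.json()['responses'][0])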

View File

@@ -0,0 +1,132 @@
from typing import Optional, List, Any, Union, Generator
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms import Xinference
from langchain.llms.utils import enforce_stop_tokens
from xinference.client import RESTfulChatglmCppChatModelHandle, \
RESTfulChatModelHandle, RESTfulGenerateModelHandle
class XinferenceLLM(Xinference):
def _call(
self,
prompt: str,
stop: Optional[List[str]] = None,
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> str:
"""Call the xinference model and return the output.
Args:
prompt: The prompt to use for generation.
stop: Optional list of stop words to use when generating.
generate_config: Optional dictionary for the configuration used for
generation.
Returns:
The generated string by the model.
"""
model = self.client.get_model(self.model_uid)
if isinstance(model, RESTfulChatModelHandle):
generate_config: "LlamaCppGenerateConfig" = kwargs.get("generate_config", {})
if stop:
generate_config["stop"] = stop
if generate_config and generate_config.get("stream"):
combined_text_output = ""
for token in self._stream_generate(
model=model,
prompt=prompt,
run_manager=run_manager,
generate_config=generate_config,
):
combined_text_output += token
return combined_text_output
else:
completion = model.chat(prompt=prompt, generate_config=generate_config)
return completion["choices"][0]["message"]["content"]
elif isinstance(model, RESTfulGenerateModelHandle):
generate_config: "LlamaCppGenerateConfig" = kwargs.get("generate_config", {})
if stop:
generate_config["stop"] = stop
if generate_config and generate_config.get("stream"):
combined_text_output = ""
for token in self._stream_generate(
model=model,
prompt=prompt,
run_manager=run_manager,
generate_config=generate_config,
):
combined_text_output += token
return combined_text_output
else:
completion = model.generate(prompt=prompt, generate_config=generate_config)
return completion["choices"][0]["text"]
elif isinstance(model, RESTfulChatglmCppChatModelHandle):
generate_config: "ChatglmCppGenerateConfig" = kwargs.get("generate_config", {})
if generate_config and generate_config.get("stream"):
combined_text_output = ""
for token in self._stream_generate(
model=model,
prompt=prompt,
run_manager=run_manager,
generate_config=generate_config,
):
combined_text_output += token
completion = combined_text_output
else:
completion = model.chat(prompt=prompt, generate_config=generate_config)
completion = completion["choices"][0]["message"]["content"]
if stop is not None:
completion = enforce_stop_tokens(completion, stop)
return completion
def _stream_generate(
self,
model: Union["RESTfulGenerateModelHandle", "RESTfulChatModelHandle", "RESTfulChatglmCppChatModelHandle"],
prompt: str,
run_manager: Optional[CallbackManagerForLLMRun] = None,
generate_config: Optional[Union["LlamaCppGenerateConfig", "PytorchGenerateConfig", "ChatglmCppGenerateConfig"]] = None,
) -> Generator[str, None, None]:
"""
Args:
prompt: The prompt to use for generation.
model: The model used for generation.
stop: Optional list of stop words to use when generating.
generate_config: Optional dictionary for the configuration used for
generation.
Yields:
A string token.
"""
if isinstance(model, RESTfulGenerateModelHandle):
streaming_response = model.generate(
prompt=prompt, generate_config=generate_config
)
else:
streaming_response = model.chat(
prompt=prompt, generate_config=generate_config
)
for chunk in streaming_response:
if isinstance(chunk, dict):
choices = chunk.get("choices", [])
if choices:
choice = choices[0]
if isinstance(choice, dict):
token = choice.get("text", "")
log_probs = choice.get("logprobs")
if run_manager:
run_manager.on_llm_new_token(
token=token, verbose=self.verbose, log_probs=log_probs
)
yield token
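
A minimal usage sketch; the server URL and model UID are placeholders, and the constructor arguments match the credential_kwargs the Xinference provider passes in above:

from core.third_party.langchain.llms.xinference_llm import XinferenceLLM

llm = XinferenceLLM(
    server_url='http://127.0.0.1:9997',  # placeholder Xinference endpoint
    model_uid='<model-uid>',             # placeholder model UID
)
# Dispatches to the chat/generate/chatglm handles as in _call above.
print(llm('Human: 1+1=?\nAssistant:', stop=['\nHuman:']))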

View File

@@ -88,6 +88,11 @@ class WebReaderTool(BaseTool):
texts = character_splitter.split_text(page_contents)
docs = [Document(page_content=t) for t in texts]
if len(docs) == 0:
return "No content found."
docs = docs[1:]
# only use first 5 docs
if len(docs) > 5:
docs = docs[:5]

View File

@@ -20,6 +20,10 @@ def handle(sender, **kwargs):
# generate conversation name
try:
name = LLMGenerator.generate_conversation_name(app_model.tenant_id, message.query, message.answer)
if len(name) > 75:
name = name[:75] + '...'
conversation.name = name
except:
conversation.name = 'New Chat'

View File

@@ -48,4 +48,5 @@ dashscope~=1.5.0
huggingface_hub~=0.16.4
transformers~=4.31.0
stripe~=5.5.0
pandas==1.5.3
pandas==1.5.3
xinference==0.2.1

View File

@@ -284,8 +284,9 @@ class DocumentService:
"github_link": str,
"open_source_license": str,
"commit_date": str,
"commit_author": str
}
"commit_author": str,
},
"others": dict
}
@staticmethod
@@ -370,8 +371,8 @@ class DocumentService:
raise DocumentIndexingError()
# update document to be recover
document.is_paused = False
document.paused_by = current_user.id
document.paused_at = time.time()
document.paused_by = None
document.paused_at = None
db.session.add(document)
db.session.commit()
@@ -972,7 +973,7 @@ class SegmentService:
db.session.add(segment)
db.session.commit()
# update segment vector index
VectorService.create_segment_vector(args['keywords'], segment, dataset)
VectorService.update_segment_vector(args['keywords'], segment, dataset)
except Exception as e:
logging.exception("update segment index failed")
segment.enabled = False

View File

@@ -32,4 +32,11 @@ WENXIN_API_KEY=
WENXIN_SECRET_KEY=
# ChatGLM Credentials
CHATGLM_API_BASE=
CHATGLM_API_BASE=
# Xinference Credentials
XINFERENCE_SERVER_URL=
XINFERENCE_MODEL_UID=
# OpenLLM Credentials
OPENLLM_SERVER_URL=

View File

@@ -0,0 +1,65 @@
import json
import os
from unittest.mock import patch, MagicMock
from core.model_providers.models.embedding.xinference_embedding import XinferenceEmbedding
from core.model_providers.models.entity.model_params import ModelType
from core.model_providers.providers.xinference_provider import XinferenceProvider
from models.provider import Provider, ProviderType, ProviderModel
def get_mock_provider():
return Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name='xinference',
provider_type=ProviderType.CUSTOM.value,
encrypted_config='',
is_valid=True,
)
def get_mock_embedding_model(mocker):
model_name = 'vicuna-v1.3'
server_url = os.environ['XINFERENCE_SERVER_URL']
model_uid = os.environ['XINFERENCE_MODEL_UID']
model_provider = XinferenceProvider(provider=get_mock_provider())
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
provider_name='xinference',
model_name=model_name,
model_type=ModelType.EMBEDDINGS.value,
encrypted_config=json.dumps({
'server_url': server_url,
'model_uid': model_uid
}),
is_valid=True,
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
return XinferenceEmbedding(
model_provider=model_provider,
name=model_name
)
def decrypt_side_effect(tenant_id, encrypted_api_key):
return encrypted_api_key
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_embed_documents(mock_decrypt, mocker):
embedding_model = get_mock_embedding_model(mocker)
rst = embedding_model.client.embed_documents(['test', 'test1'])
assert isinstance(rst, list)
assert len(rst) == 2
assert len(rst[0]) == 4096
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_embed_query(mock_decrypt, mocker):
embedding_model = get_mock_embedding_model(mocker)
rst = embedding_model.client.embed_query('test')
assert isinstance(rst, list)
assert len(rst) == 4096

View File

@@ -50,7 +50,9 @@ def test_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('claude-2')
messages = [PromptMessage(content='Human: 1 + 1=? \nAssistant: ')]
rst = model.run(
@@ -58,4 +60,3 @@ def test_run(mock_decrypt):
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == '2'

View File

@@ -76,6 +76,8 @@ def test_chat_get_num_tokens(mock_decrypt, mocker):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
openai_model = get_mock_azure_openai_model('gpt-35-turbo', mocker)
messages = [PromptMessage(content='Human: Are you Human? you MUST only answer `y` or `n`? \nAssistant: ')]
rst = openai_model.run(
@@ -83,4 +85,3 @@ def test_run(mock_decrypt, mocker):
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == 'n'

View File

@@ -95,6 +95,8 @@ def test_inference_endpoints_get_num_tokens(mock_decrypt, mock_model_info, mocke
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_hosted_inference_api_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model(
'google/flan-t5-base',
'hosted_inference_api',
@@ -111,6 +113,8 @@ def test_hosted_inference_api_run(mock_decrypt, mocker):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_inference_endpoints_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model(
'',
'inference_endpoints',
@@ -121,4 +125,3 @@ def test_inference_endpoints_run(mock_decrypt, mocker):
[PromptMessage(content='Answer the following yes/no question. Can you write a whole Haiku in a single tweet?')],
)
assert len(rst.content) > 0
assert rst.content.strip() == 'no'

View File

@@ -54,11 +54,12 @@ def test_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('abab5.5-chat')
rst = model.run(
[PromptMessage(content='Human: Are you a real Human? you MUST only answer `y` or `n`? \nAssistant: ')],
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == 'n'

View File

@@ -58,7 +58,9 @@ def test_chat_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
openai_model = get_mock_openai_model('text-davinci-003')
rst = openai_model.run(
[PromptMessage(content='Human: Are you Human? you MUST only answer `y` or `n`? \nAssistant: ')],
@@ -69,7 +71,9 @@ def test_run(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_chat_run(mock_decrypt):
def test_chat_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
openai_model = get_mock_openai_model('gpt-3.5-turbo')
messages = [PromptMessage(content='Human: Are you Human? you MUST only answer `y` or `n`? \nAssistant: ')]
rst = openai_model.run(
@@ -77,4 +81,3 @@ def test_chat_run(mock_decrypt):
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == 'n'

View File

@@ -0,0 +1,72 @@
import json
import os
from unittest.mock import patch, MagicMock
from core.model_providers.models.entity.message import PromptMessage, MessageType
from core.model_providers.models.entity.model_params import ModelKwargs, ModelType
from core.model_providers.models.llm.openllm_model import OpenLLMModel
from core.model_providers.providers.openllm_provider import OpenLLMProvider
from models.provider import Provider, ProviderType, ProviderModel
def get_mock_provider():
return Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name='openllm',
provider_type=ProviderType.CUSTOM.value,
encrypted_config='',
is_valid=True,
)
def get_mock_model(model_name, mocker):
model_kwargs = ModelKwargs(
max_tokens=10,
temperature=0.01
)
server_url = os.environ['OPENLLM_SERVER_URL']
model_provider = OpenLLMProvider(provider=get_mock_provider())
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
provider_name='openllm',
model_name=model_name,
model_type=ModelType.TEXT_GENERATION.value,
encrypted_config=json.dumps({
'server_url': server_url
}),
is_valid=True,
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
return OpenLLMModel(
model_provider=model_provider,
name=model_name,
model_kwargs=model_kwargs
)
def decrypt_side_effect(tenant_id, encrypted_api_key):
return encrypted_api_key
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_num_tokens(mock_decrypt, mocker):
model = get_mock_model('facebook/opt-125m', mocker)
rst = model.get_num_tokens([
PromptMessage(type=MessageType.HUMAN, content='Who is your manufacturer?')
])
assert rst == 5
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('facebook/opt-125m', mocker)
messages = [PromptMessage(content='Human: who are you? \nAnswer: ')]
rst = model.run(
messages
)
assert len(rst.content) > 0

View File

@@ -65,6 +65,8 @@ def test_get_num_tokens(mock_decrypt, mocker):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('a16z-infra/llama-2-13b-chat', '2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52', mocker)
messages = [PromptMessage(content='Human: 1+1=? \nAnswer: ')]
rst = model.run(

View File

@@ -58,7 +58,9 @@ def test_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('spark')
messages = [PromptMessage(content='Human: 1 + 1=? \nAssistant: Integer answer is:')]
rst = model.run(
@@ -66,4 +68,3 @@ def test_run(mock_decrypt):
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == '2'

View File

@@ -52,7 +52,9 @@ def test_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('qwen-v1')
rst = model.run(
[PromptMessage(content='Human: Are you Human? you MUST only answer `y` or `n`? \nAssistant: ')],

View File

@@ -52,7 +52,9 @@ def test_get_num_tokens(mock_decrypt):
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt):
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('ernie-bot')
messages = [PromptMessage(content='Human: 1 + 1=? \nAssistant: Integer answer is:')]
rst = model.run(
@@ -60,4 +62,3 @@ def test_run(mock_decrypt):
stop=['\nHuman:'],
)
assert len(rst.content) > 0
assert rst.content.strip() == '2'

View File

@@ -0,0 +1,74 @@
import json
import os
from unittest.mock import patch, MagicMock
from core.model_providers.models.entity.message import PromptMessage, MessageType
from core.model_providers.models.entity.model_params import ModelKwargs, ModelType
from core.model_providers.models.llm.xinference_model import XinferenceModel
from core.model_providers.providers.xinference_provider import XinferenceProvider
from models.provider import Provider, ProviderType, ProviderModel
def get_mock_provider():
return Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name='xinference',
provider_type=ProviderType.CUSTOM.value,
encrypted_config='',
is_valid=True,
)
def get_mock_model(model_name, mocker):
model_kwargs = ModelKwargs(
max_tokens=10,
temperature=0.01
)
server_url = os.environ['XINFERENCE_SERVER_URL']
model_uid = os.environ['XINFERENCE_MODEL_UID']
model_provider = XinferenceProvider(provider=get_mock_provider())
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
provider_name='xinference',
model_name=model_name,
model_type=ModelType.TEXT_GENERATION.value,
encrypted_config=json.dumps({
'server_url': server_url,
'model_uid': model_uid
}),
is_valid=True,
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
return XinferenceModel(
model_provider=model_provider,
name=model_name,
model_kwargs=model_kwargs
)
def decrypt_side_effect(tenant_id, encrypted_api_key):
return encrypted_api_key
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_num_tokens(mock_decrypt, mocker):
model = get_mock_model('llama-2-chat', mocker)
rst = model.get_num_tokens([
PromptMessage(type=MessageType.HUMAN, content='Who is your manufacturer?')
])
assert rst == 5
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_run(mock_decrypt, mocker):
mocker.patch('core.model_providers.providers.base.BaseModelProvider.update_last_used', return_value=None)
model = get_mock_model('llama-2-chat', mocker)
messages = [PromptMessage(content='Human: 1+1=? \nAnswer: ')]
rst = model.run(
messages
)
assert len(rst.content) > 0

View File

@@ -0,0 +1,122 @@
import pytest
from unittest.mock import patch, MagicMock
import json
from core.model_providers.models.entity.model_params import ModelType
from core.model_providers.providers.base import CredentialsValidateFailedError
from core.model_providers.providers.openllm_provider import OpenLLMProvider
from models.provider import ProviderType, Provider, ProviderModel
PROVIDER_NAME = 'openllm'
MODEL_PROVIDER_CLASS = OpenLLMProvider
VALIDATE_CREDENTIAL = {
'server_url': 'http://127.0.0.1:3333/'
}
def encrypt_side_effect(tenant_id, encrypt_key):
return f'encrypted_{encrypt_key}'
def decrypt_side_effect(tenant_id, encrypted_key):
return encrypted_key.replace('encrypted_', '')
def test_is_credentials_valid_or_raise_valid(mocker):
mocker.patch('core.third_party.langchain.llms.openllm.OpenLLM._call',
return_value="abc")
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='username/test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials=VALIDATE_CREDENTIAL.copy()
)
def test_is_credentials_valid_or_raise_invalid(mocker):
# raise CredentialsValidateFailedError if server_url is missing from credentials
with pytest.raises(CredentialsValidateFailedError):
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials={}
)
# raise CredentialsValidateFailedError if server_url is invalid
with pytest.raises(CredentialsValidateFailedError):
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials={'server_url': 'invalid'})
@patch('core.helper.encrypter.encrypt_token', side_effect=encrypt_side_effect)
def test_encrypt_model_credentials(mock_encrypt):
api_key = 'http://127.0.0.1:3333/'
result = MODEL_PROVIDER_CLASS.encrypt_model_credentials(
tenant_id='tenant_id',
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials=VALIDATE_CREDENTIAL.copy()
)
mock_encrypt.assert_called_with('tenant_id', api_key)
assert result['server_url'] == f'encrypted_{api_key}'
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_model_credentials_custom(mock_decrypt, mocker):
provider = Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name=PROVIDER_NAME,
provider_type=ProviderType.CUSTOM.value,
encrypted_config=None,
is_valid=True,
)
encrypted_credential = VALIDATE_CREDENTIAL.copy()
encrypted_credential['server_url'] = 'encrypted_' + encrypted_credential['server_url']
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
encrypted_config=json.dumps(encrypted_credential)
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
model_provider = MODEL_PROVIDER_CLASS(provider=provider)
result = model_provider.get_model_credentials(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION
)
assert result['server_url'] == 'http://127.0.0.1:3333/'
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_model_credentials_obfuscated(mock_decrypt, mocker):
provider = Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name=PROVIDER_NAME,
provider_type=ProviderType.CUSTOM.value,
encrypted_config=None,
is_valid=True,
)
encrypted_credential = VALIDATE_CREDENTIAL.copy()
encrypted_credential['server_url'] = 'encrypted_' + encrypted_credential['server_url']
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
encrypted_config=json.dumps(encrypted_credential)
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
model_provider = MODEL_PROVIDER_CLASS(provider=provider)
result = model_provider.get_model_credentials(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
obfuscated=True
)
middle_token = result['server_url'][6:-2]
assert len(middle_token) == max(len(VALIDATE_CREDENTIAL['server_url']) - 8, 0)
assert all(char == '*' for char in middle_token)
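For anyone checking the assertion arithmetic above: the [6:-2] slice implies a mask that keeps the first six and last two characters and stars the middle. A standalone TypeScript restatement of that assumption (inferred from the test, not taken from the implementation):

// Assumed masking scheme: keep the first 6 and last 2 characters,
// star everything in between (inferred from the [6:-2] slice above)
const obfuscate = (secret: string): string =>
  secret.slice(0, 6) + '*'.repeat(Math.max(secret.length - 8, 0)) + secret.slice(-2)

const url = 'http://127.0.0.1:3333/' // 22 characters
const masked = obfuscate(url) // 'http:/' + 14 stars + '3/'
console.assert(masked.slice(6, -2) === '*'.repeat(14))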

View File

@@ -0,0 +1,130 @@
import pytest
from unittest.mock import patch, MagicMock
import json
from core.model_providers.models.entity.model_params import ModelType
from core.model_providers.providers.base import CredentialsValidateFailedError
from core.model_providers.providers.xinference_provider import XinferenceProvider
from models.provider import ProviderType, Provider, ProviderModel
PROVIDER_NAME = 'xinference'
MODEL_PROVIDER_CLASS = XinferenceProvider
VALIDATE_CREDENTIAL = {
'model_uid': 'fake-model-uid',
'server_url': 'http://127.0.0.1:9997/'
}
def encrypt_side_effect(tenant_id, encrypt_key):
return f'encrypted_{encrypt_key}'
def decrypt_side_effect(tenant_id, encrypted_key):
return encrypted_key.replace('encrypted_', '')
def test_is_credentials_valid_or_raise_valid(mocker):
mocker.patch('core.third_party.langchain.llms.xinference_llm.XinferenceLLM._call',
return_value="abc")
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='username/test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials=VALIDATE_CREDENTIAL.copy()
)
def test_is_credentials_valid_or_raise_invalid():
# raise CredentialsValidateFailedError if server_url is missing from credentials
with pytest.raises(CredentialsValidateFailedError):
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials={}
)
# raise CredentialsValidateFailedError if server_url is invalid
with pytest.raises(CredentialsValidateFailedError):
MODEL_PROVIDER_CLASS.is_model_credentials_valid_or_raise(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials={'server_url': 'invalid'})
@patch('core.helper.encrypter.encrypt_token', side_effect=encrypt_side_effect)
def test_encrypt_model_credentials(mock_encrypt, mocker):
api_key = 'http://127.0.0.1:9997/'
mocker.patch('core.model_providers.providers.xinference_provider.XinferenceProvider._get_extra_credentials',
return_value={
'model_handle_type': 'generate',
'model_format': 'ggmlv3'
})
result = MODEL_PROVIDER_CLASS.encrypt_model_credentials(
tenant_id='tenant_id',
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
credentials=VALIDATE_CREDENTIAL.copy()
)
mock_encrypt.assert_called_with('tenant_id', api_key)
assert result['server_url'] == f'encrypted_{api_key}'
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_model_credentials_custom(mock_decrypt, mocker):
provider = Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name=PROVIDER_NAME,
provider_type=ProviderType.CUSTOM.value,
encrypted_config=None,
is_valid=True,
)
encrypted_credential = VALIDATE_CREDENTIAL.copy()
encrypted_credential['server_url'] = 'encrypted_' + encrypted_credential['server_url']
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
encrypted_config=json.dumps(encrypted_credential)
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
model_provider = MODEL_PROVIDER_CLASS(provider=provider)
result = model_provider.get_model_credentials(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION
)
assert result['server_url'] == 'http://127.0.0.1:9997/'
@patch('core.helper.encrypter.decrypt_token', side_effect=decrypt_side_effect)
def test_get_model_credentials_obfuscated(mock_decrypt, mocker):
provider = Provider(
id='provider_id',
tenant_id='tenant_id',
provider_name=PROVIDER_NAME,
provider_type=ProviderType.CUSTOM.value,
encrypted_config=None,
is_valid=True,
)
encrypted_credential = VALIDATE_CREDENTIAL.copy()
encrypted_credential['server_url'] = 'encrypted_' + encrypted_credential['server_url']
mock_query = MagicMock()
mock_query.filter.return_value.first.return_value = ProviderModel(
encrypted_config=json.dumps(encrypted_credential)
)
mocker.patch('extensions.ext_database.db.session.query', return_value=mock_query)
model_provider = MODEL_PROVIDER_CLASS(provider=provider)
result = model_provider.get_model_credentials(
model_name='test_model_name',
model_type=ModelType.TEXT_GENERATION,
obfuscated=True
)
middle_token = result['server_url'][6:-2]
assert len(middle_token) == max(len(VALIDATE_CREDENTIAL['server_url']) - 8, 0)
assert all(char == '*' for char in middle_token)

View File

@@ -2,7 +2,7 @@ version: '3.1'
services:
# API service
api:
image: langgenius/dify-api:0.3.14
image: langgenius/dify-api:0.3.15
restart: always
environment:
# Startup mode, 'api' starts the API server.
@@ -124,7 +124,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.3.14
image: langgenius/dify-api:0.3.15
restart: always
environment:
# Startup mode, 'worker' starts the Celery worker for processing the queue.
@@ -176,7 +176,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.3.14
image: langgenius/dify-web:0.3.15
restart: always
environment:
EDITION: SELF_HOSTED

View File

@@ -17,6 +17,7 @@ import type { App } from '@/types/app'
import type { UpdateAppSiteCodeResponse } from '@/models/app'
import { asyncRunSafe } from '@/utils'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import type { IAppCardProps } from '@/app/components/app/overview/appCard'
export type ICardViewProps = {
appId: string
@@ -68,7 +69,7 @@ const CardView: FC<ICardViewProps> = ({ appId }) => {
handleError(err)
}
const onSaveSiteConfig = async (params: any) => {
const onSaveSiteConfig: IAppCardProps['onSaveSiteConfig'] = async (params) => {
const [err] = await asyncRunSafe<App>(
updateAppSiteConfig({
url: `/apps/${appId}/site`,
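The typed handler above uses an indexed access type so the function and the prop it implements cannot drift apart. A minimal self-contained sketch of the pattern (names hypothetical):

// Deriving a handler's type from the prop that will receive it
type CardProps = {
  onSaveSiteConfig?: (params: { title: string }) => Promise<void>
}

// No `any`: the parameter type follows CardProps automatically
const onSaveSiteConfig: CardProps['onSaveSiteConfig'] = async (params) => {
  console.log(params.title)
}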

View File

@@ -16,7 +16,6 @@ const Overview = async ({
const { t } = await useTranslation(locale, 'app-overview')
return (
<div className="h-full px-16 py-6 overflow-scroll">
{/* <WelcomeBanner /> */}
<ApikeyInfoPanel />
<div className='flex flex-row items-center justify-between mb-4 text-xl text-gray-900'>
{t('overview.title')}

View File

@@ -1,200 +0,0 @@
'use client'
import type { FC } from 'react'
import React, { useState } from 'react'
import { useContext } from 'use-context-selector'
import { useTranslation } from 'react-i18next'
import Link from 'next/link'
import useSWR, { useSWRConfig } from 'swr'
import { ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline'
import { ExclamationCircleIcon } from '@heroicons/react/24/solid'
import { debounce } from 'lodash-es'
import Popover from '@/app/components/base/popover'
import Button from '@/app/components/base/button'
import Tag from '@/app/components/base/tag'
import { ToastContext } from '@/app/components/base/toast'
import { updateOpenAIKey, validateOpenAIKey } from '@/service/apps'
import { fetchTenantInfo } from '@/service/common'
import I18n from '@/context/i18n'
type IStatusType = 'normal' | 'verified' | 'error' | 'error-api-key-exceed-bill'
const STATUS_COLOR_MAP = {
'normal': { color: '', bgColor: 'bg-primary-50', borderColor: 'border-primary-100' },
'error': { color: 'text-red-600', bgColor: 'bg-red-50', borderColor: 'border-red-100' },
'verified': { color: '', bgColor: 'bg-green-50', borderColor: 'border-green-100' },
'error-api-key-exceed-bill': { color: 'text-red-600', bgColor: 'bg-red-50', borderColor: 'border-red-100' },
}
const CheckCircleIcon: FC<{ className?: string }> = ({ className }) => {
return <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>
<rect width="20" height="20" rx="10" fill="#DEF7EC" />
<path fillRule="evenodd" clipRule="evenodd" d="M14.6947 6.70495C14.8259 6.83622 14.8996 7.01424 14.8996 7.19985C14.8996 7.38547 14.8259 7.56348 14.6947 7.69475L9.0947 13.2948C8.96343 13.426 8.78541 13.4997 8.5998 13.4997C8.41418 13.4997 8.23617 13.426 8.1049 13.2948L5.3049 10.4948C5.17739 10.3627 5.10683 10.1859 5.10842 10.0024C5.11002 9.81883 5.18364 9.64326 5.31342 9.51348C5.44321 9.38369 5.61878 9.31007 5.80232 9.30848C5.98585 9.30688 6.16268 9.37744 6.2947 9.50495L8.5998 11.8101L13.7049 6.70495C13.8362 6.57372 14.0142 6.5 14.1998 6.5C14.3854 6.5 14.5634 6.57372 14.6947 6.70495Z" fill="#046C4E" />
</svg>
}
type IEditKeyDiv = {
className?: string
showInPopover?: boolean
onClose?: () => void
getTenantInfo?: () => void
}
const EditKeyDiv: FC<IEditKeyDiv> = ({ className = '', showInPopover = false, onClose, getTenantInfo }) => {
const [inputValue, setInputValue] = useState<string | undefined>()
const [editStatus, setEditStatus] = useState<IStatusType>('normal')
const [loading, setLoading] = useState(false)
const [validating, setValidating] = useState(false)
const { notify } = useContext(ToastContext)
const { t } = useTranslation()
const { locale } = useContext(I18n)
// Hide the pop-up window and need to get the latest key again
// If the key is valid, the edit button will be hidden later
const onClosePanel = () => {
getTenantInfo && getTenantInfo()
onClose && onClose()
}
const onSaveKey = async () => {
if (editStatus === 'verified') {
setLoading(true)
try {
await updateOpenAIKey({ url: '/providers/openai/token', body: { token: inputValue ?? '' } })
notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
onClosePanel()
}
catch (err) {
notify({ type: 'error', message: t('common.actionMsg.modificationFailed') })
}
finally {
setLoading(false)
}
}
}
const validateKey = async (value: string) => {
try {
setValidating(true)
const res = await validateOpenAIKey({ url: '/providers/openai/token-validate', body: { token: value ?? '' } })
setEditStatus(res.result === 'success' ? 'verified' : 'error')
}
catch (err: any) {
if (err.status === 400) {
err.json().then(({ code }: any) => {
if (code === 'provider_request_failed')
setEditStatus('error-api-key-exceed-bill')
})
}
else {
setEditStatus('error')
}
}
finally {
setValidating(false)
}
}
const renderErrorMessage = () => {
if (validating) {
return (
<div className={'text-primary-600 mt-2 text-xs'}>
{t('common.provider.validating')}
</div>
)
}
if (editStatus === 'error-api-key-exceed-bill') {
return (
<div className={'text-[#D92D20] mt-2 text-xs'}>
{t('common.provider.apiKeyExceedBill')}
{locale === 'en' ? ' ' : ''}
<Link
className='underline'
href="https://platform.openai.com/account/api-keys"
target={'_blank'}>
{locale === 'en' ? 'this link' : '这篇文档'}
</Link>
</div>
)
}
if (editStatus === 'error') {
return (
<div className={'text-[#D92D20] mt-2 text-xs'}>
{t('common.provider.invalidKey')}
</div>
)
}
return null
}
return (
<div className={`flex flex-col w-full rounded-lg px-8 py-6 border-solid border-[0.5px] ${className} ${Object.values(STATUS_COLOR_MAP[editStatus]).join(' ')}`}>
{!showInPopover && <p className='text-xl font-medium text-gray-800'>{t('appOverview.welcome.firstStepTip')}</p>}
<p className={`${showInPopover ? 'text-sm' : 'text-xl'} font-medium text-gray-800`}>{t('appOverview.welcome.enterKeyTip')} {showInPopover ? '' : '👇'}</p>
<div className='relative mt-2'>
<input type="text"
className={`h-9 w-96 max-w-full py-2 pl-2 text-gray-900 rounded-lg bg-white sm:text-xs focus:ring-blue-500 focus:border-blue-500 shadow-sm ${editStatus === 'normal' ? 'pr-2' : 'pr-8'}`}
placeholder={t('appOverview.welcome.placeholder') || ''}
onChange={debounce((e) => {
setInputValue(e.target.value)
if (!e.target.value) {
setEditStatus('normal')
return
}
validateKey(e.target.value)
}, 300)}
/>
{editStatus === 'verified' && <div className="absolute inset-y-0 right-0 flex flex-row-reverse items-center pr-6 pointer-events-none">
<CheckCircleIcon className="rounded-lg" />
</div>}
{(editStatus === 'error' || editStatus === 'error-api-key-exceed-bill') && <div className="absolute inset-y-0 right-0 flex flex-row-reverse items-center pr-6 pointer-events-none">
<ExclamationCircleIcon className="w-5 h-5 text-red-800" />
</div>}
{showInPopover ? null : <Button type='primary' onClick={onSaveKey} className='!h-9 !inline-block ml-2' loading={loading} disabled={editStatus !== 'verified'}>{t('common.operation.save')}</Button>}
</div>
{renderErrorMessage()}
<Link className="inline-flex items-center mt-2 text-xs font-normal cursor-pointer text-primary-600 w-fit" href="https://platform.openai.com/account/api-keys" target={'_blank'}>
{t('appOverview.welcome.getKeyTip')}
<ArrowTopRightOnSquareIcon className='w-3 h-3 ml-1 text-primary-600' aria-hidden="true" />
</Link>
{showInPopover && <div className='flex justify-end mt-6'>
<Button className='flex-shrink-0 mr-2' onClick={onClosePanel}>{t('common.operation.cancel')}</Button>
<Button type='primary' className='flex-shrink-0' onClick={onSaveKey} loading={loading} disabled={editStatus !== 'verified'}>{t('common.operation.save')}</Button>
</div>}
</div>
)
}
const WelcomeBanner: FC = () => {
const { data: userInfo } = useSWR({ url: '/info' }, fetchTenantInfo)
if (!userInfo)
return null
return userInfo?.providers?.find(({ token_is_set }) => token_is_set) ? null : <EditKeyDiv className='mb-8' />
}
export const EditKeyPopover: FC = () => {
const { data: userInfo } = useSWR({ url: '/info' }, fetchTenantInfo)
const { mutate } = useSWRConfig()
if (!userInfo)
return null
const getTenantInfo = () => {
mutate({ url: '/info' })
}
// In this case, the edit button is displayed
const targetProvider = userInfo?.providers?.some(({ token_is_set, is_valid }) => token_is_set && is_valid)
return (
!targetProvider
? <div className='flex items-center'>
<Tag className='mr-2 h-fit' color='red'><ExclamationCircleIcon className='h-3.5 w-3.5 mr-2' />OpenAI API key invalid</Tag>
<Popover
htmlContent={<EditKeyDiv className='!border-0' showInPopover={true} getTenantInfo={getTenantInfo} />}
trigger='click'
position='br'
btnElement='Edit'
btnClassName='text-primary-600 !text-xs px-3 py-1.5'
className='!p-0 !w-[464px] h-[200px]'
/>
</div>
: null)
}
export default WelcomeBanner
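The deleted banner's one reusable idea is its debounced validation: the network check fires only after the user pauses typing for 300 ms. A reduced sketch under that assumption, with a stand-in validator instead of the real POST to /providers/openai/token-validate:

import { debounce } from 'lodash-es'

// Stand-in validator; the removed component called the token-validate endpoint
const validateKey = async (value: string): Promise<boolean> => value.startsWith('sk-')

// 300 ms of quiet before a round-trip, mirroring the removed component
const debouncedValidate = debounce(async (value: string) => {
  if (!value)
    return
  const ok = await validateKey(value)
  console.log(ok ? 'verified' : 'error')
}, 300)

debouncedValidate('sk-test')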

View File

@@ -68,11 +68,11 @@ const Apps = () => {
return (
<nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0'>
{ isCurrentWorkspaceManager
&& <NewAppCard ref={anchorRef} onSuccess={mutate} />}
{data?.map(({ data: apps }) => apps.map(app => (
<AppCard key={app.id} app={app} onRefresh={mutate} />
)))}
{ isCurrentWorkspaceManager
&& <NewAppCard ref={anchorRef} onSuccess={mutate} />}
{
showPayStatusModal && (
<Confirm

View File

@@ -42,10 +42,10 @@ const Datasets = () => {
return (
<nav className='grid content-start grid-cols-1 gap-4 px-12 pt-8 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 grow shrink-0'>
{ isCurrentWorkspaceManager && <NewDatasetCard ref={anchorRef} /> }
{data?.map(({ data: datasets }) => datasets.map(dataset => (
<DatasetCard key={dataset.id} dataset={dataset} onDelete={mutate} />),
))}
{ isCurrentWorkspaceManager && <NewDatasetCard ref={anchorRef} /> }
</nav>
)
}

View File

@@ -1,5 +1,5 @@
'use client'
import { useState } from 'react'
import { useCallback, useState } from 'react'
import { useContext } from 'use-context-selector'
import { useTranslation } from 'react-i18next'
import useSWR from 'swr'
@@ -22,7 +22,7 @@ const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/
const ActivateForm = () => {
const { t } = useTranslation()
const { locale } = useContext(I18n)
const { locale, setLocaleOnClient } = useContext(I18n)
const searchParams = useSearchParams()
const workspaceID = searchParams.get('workspace_id')
const email = searchParams.get('email')
@@ -45,14 +45,16 @@ const ActivateForm = () => {
const [timezone, setTimezone] = useState('Asia/Shanghai')
const [language, setLanguage] = useState('en-US')
const [showSuccess, setShowSuccess] = useState(false)
const defaultLanguage = useCallback(() => (window.navigator.language.startsWith('zh') ? languageMaps['zh-Hans'] : languageMaps.en) || languageMaps.en, [])
const showErrorMessage = (message: string) => {
const showErrorMessage = useCallback((message: string) => {
Toast.notify({
type: 'error',
message,
})
}
const valid = () => {
}, [])
const valid = useCallback(() => {
if (!name.trim()) {
showErrorMessage(t('login.error.nameEmpty'))
return false
@@ -65,9 +67,9 @@ const ActivateForm = () => {
showErrorMessage(t('login.error.passwordInvalid'))
return true
}
}, [name, password, showErrorMessage, t])
const handleActivate = async () => {
const handleActivate = useCallback(async () => {
if (!valid())
return
try {
@@ -83,17 +85,18 @@ const ActivateForm = () => {
timezone,
},
})
setLocaleOnClient(language.startsWith('en') ? 'en' : 'zh-Hans', false)
setShowSuccess(true)
}
catch {
recheck()
}
}
}, [email, language, name, password, recheck, setLocaleOnClient, timezone, token, valid, workspaceID])
return (
<div className={
cn(
'flex flex-col items-center w-full grow items-center justify-center',
'flex flex-col items-center w-full grow justify-center',
'px-6',
'md:px-[108px]',
)
@@ -167,7 +170,7 @@ const ActivateForm = () => {
</label>
<div className="relative mt-1 rounded-md shadow-sm">
<SimpleSelect
defaultValue={languageMaps.en}
defaultValue={defaultLanguage()}
items={languages}
onSelect={(item) => {
setLanguage(item.value as string)
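Most of this diff wraps handlers in useCallback with explicit dependency arrays, so a handler's identity changes only when its real inputs do. A self-contained illustration of the shape (the hook and names below are hypothetical):

import { useCallback, useState } from 'react'

// The memoized handler stays stable across re-renders until `name` changes,
// which keeps child props and effect dependencies from churning
const useActivate = (name: string) => {
  const [done, setDone] = useState(false)
  const handleActivate = useCallback(async () => {
    if (!name.trim())
      return
    setDone(true)
  }, [name])
  return { done, handleActivate }
}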

View File

@@ -225,7 +225,7 @@ const Answer: FC<IAnswerProps> = ({ item, feedbackDisabled = false, isHideFeedba
setLoading(true)
const res = await onSubmitAnnotation?.(id, inputValue)
if (res)
setAnnotation({ ...annotation, content: inputValue } as any)
setAnnotation({ ...annotation, content: inputValue } as Annotation)
setLoading(false)
setShowEdit(false)
}}>{t('common.operation.confirm')}</Button>

View File

@@ -81,7 +81,7 @@ const Chat: FC<IChatProps> = ({
const isUseInputMethod = useRef(false)
const [query, setQuery] = React.useState('')
const handleContentChange = (e: any) => {
const handleContentChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
const value = e.target.value
setQuery(value)
}
@@ -111,7 +111,7 @@ const Chat: FC<IChatProps> = ({
setQuery('')
}
const handleKeyUp = (e: any) => {
const handleKeyUp = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
if (e.code === 'Enter') {
e.preventDefault()
// prevent send message when using input method enter
@@ -120,7 +120,7 @@ const Chat: FC<IChatProps> = ({
}
}
const handleKeyDown = (e: any) => {
const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
isUseInputMethod.current = e.nativeEvent.isComposing
if (e.code === 'Enter' && !e.shiftKey) {
setQuery(query.replace(/\n$/, ''))
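Beyond swapping `any` for typed React events, the keydown/keyup split above exists to keep an Enter that merely confirms an IME composition from sending the message. The mechanism in isolation (names are illustrative):

import type React from 'react'
import { useRef } from 'react'

const useEnterToSend = (send: (text: string) => void) => {
  const isComposing = useRef(false)

  // keydown fires first: record whether Enter belongs to an input-method composition
  const onKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
    isComposing.current = e.nativeEvent.isComposing
  }

  // keyup decides: only a plain Enter sends
  const onKeyUp = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
    if (e.code === 'Enter' && !isComposing.current)
      send(e.currentTarget.value)
  }

  return { onKeyDown, onKeyUp }
}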

View File

@@ -1,19 +1,19 @@
'use client'
import React, { FC, ReactNode } from 'react'
import { ReactElement } from 'react-markdown/lib/react-markdown'
import React from 'react'
import type { FC } from 'react'
export interface IInputTypeIconProps {
type: string
type IInputTypeIconProps = {
type: 'string' | 'select'
}
const IconMap = (type: string) => {
const icons: Record<string, ReactNode> = {
'string': (
const IconMap = (type: IInputTypeIconProps['type']) => {
const icons = {
string: (
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M3.52593 0.166672H8.47411C8.94367 0.166665 9.33123 0.166659 9.64692 0.192452C9.97481 0.219242 10.2762 0.276738 10.5593 0.420991C10.9984 0.644695 11.3553 1.00165 11.579 1.44069C11.7233 1.72381 11.7808 2.02522 11.8076 2.35311C11.8334 2.6688 11.8334 3.05634 11.8334 3.5259V8.47411C11.8334 8.94367 11.8334 9.33121 11.8076 9.6469C11.7808 9.97479 11.7233 10.2762 11.579 10.5593C11.3553 10.9984 10.9984 11.3553 10.5593 11.579C10.2762 11.7233 9.97481 11.7808 9.64692 11.8076C9.33123 11.8334 8.94369 11.8333 8.47413 11.8333H3.52592C3.05636 11.8333 2.66882 11.8334 2.35312 11.8076C2.02523 11.7808 1.72382 11.7233 1.44071 11.579C1.00167 11.3553 0.644711 10.9984 0.421006 10.5593C0.276753 10.2762 0.219257 9.97479 0.192468 9.6469C0.166674 9.33121 0.16668 8.94366 0.166687 8.4741V3.52591C0.16668 3.05635 0.166674 2.6688 0.192468 2.35311C0.219257 2.02522 0.276753 1.72381 0.421006 1.44069C0.644711 1.00165 1.00167 0.644695 1.44071 0.420991C1.72382 0.276738 2.02523 0.219242 2.35312 0.192452C2.66882 0.166659 3.05637 0.166665 3.52593 0.166672ZM3.08335 3.08334C3.08335 2.76117 3.34452 2.50001 3.66669 2.50001H8.33335C8.65552 2.50001 8.91669 2.76117 8.91669 3.08334C8.91669 3.4055 8.65552 3.66667 8.33335 3.66667H6.58335V8.91667C6.58335 9.23884 6.32219 9.5 6.00002 9.5C5.67785 9.5 5.41669 9.23884 5.41669 8.91667V3.66667H3.66669C3.34452 3.66667 3.08335 3.4055 3.08335 3.08334Z" fill="#98A2B3" />
</svg>
),
'select': (
select: (
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fillRule="evenodd" clipRule="evenodd" d="M7.48913 4.08334H3.01083C2.70334 4.08333 2.43804 4.08333 2.21955 4.10118C1.98893 4.12002 1.75955 4.16162 1.53883 4.27408C1.20955 4.44186 0.941831 4.70958 0.774053 5.03886C0.66159 5.25958 0.619989 5.48896 0.601147 5.71958C0.583295 5.93807 0.583304 6.20334 0.583313 6.51084V10.9892C0.583304 11.2967 0.583295 11.5619 0.601147 11.7804C0.619989 12.0111 0.66159 12.2404 0.774053 12.4612C0.941831 12.7904 1.20955 13.0582 1.53883 13.2259C1.75955 13.3384 1.98893 13.38 2.21955 13.3988C2.43803 13.4167 2.70329 13.4167 3.01077 13.4167H7.48912C7.7966 13.4167 8.06193 13.4167 8.28041 13.3988C8.51103 13.38 8.74041 13.3384 8.96113 13.2259C9.29041 13.0582 9.55813 12.7904 9.72591 12.4612C9.83837 12.2404 9.87997 12.0111 9.89882 11.7804C9.91667 11.5619 9.91666 11.2967 9.91665 10.9892V6.51087C9.91666 6.20336 9.91667 5.93808 9.89882 5.71958C9.87997 5.48896 9.83837 5.25958 9.72591 5.03886C9.55813 4.70958 9.29041 4.44186 8.96113 4.27408C8.74041 4.16162 8.51103 4.12002 8.28041 4.10118C8.06192 4.08333 7.79663 4.08333 7.48913 4.08334ZM7.70413 7.70416C7.93193 7.47635 7.93193 7.107 7.70413 6.8792C7.47632 6.65139 7.10697 6.65139 6.87917 6.8792L4.66665 9.09172L3.91246 8.33753C3.68465 8.10973 3.31531 8.10973 3.0875 8.33753C2.8597 8.56534 2.8597 8.93468 3.0875 9.16249L4.25417 10.3292C4.48197 10.557 4.85132 10.557 5.07913 10.3292L7.70413 7.70416Z" fill="#98A2B3" />
<path d="M10.9891 0.583344H6.51083C6.20334 0.583334 5.93804 0.583326 5.71955 0.601177C5.48893 0.620019 5.25955 0.66162 5.03883 0.774084C4.70955 0.941862 4.44183 1.20958 4.27405 1.53886C4.16159 1.75958 4.11999 1.98896 4.10115 2.21958C4.08514 2.41545 4.08349 2.64892 4.08333 2.91669L7.51382 2.91668C7.79886 2.91662 8.10791 2.91654 8.37541 2.9384C8.67818 2.96314 9.07818 3.02436 9.49078 3.23459C10.0396 3.51422 10.4858 3.96041 10.7654 4.50922C10.9756 4.92182 11.0369 5.32182 11.0616 5.62459C11.0835 5.8921 11.0834 6.20115 11.0833 6.48619L11.0833 9.91666C11.3511 9.9165 11.5845 9.91485 11.7804 9.89885C12.011 9.88 12.2404 9.8384 12.4611 9.72594C12.7904 9.55816 13.0581 9.29045 13.2259 8.96116C13.3384 8.74044 13.38 8.51106 13.3988 8.28044C13.4167 8.06196 13.4167 7.7967 13.4166 7.48922V3.01087C13.4167 2.70339 13.4167 2.43807 13.3988 2.21958C13.38 1.98896 13.3384 1.75958 13.2259 1.53886C13.0581 1.20958 12.7904 0.941862 12.4611 0.774084C12.2404 0.66162 12.011 0.620019 11.7804 0.601177C11.5619 0.583326 11.2966 0.583334 10.9891 0.583344Z" fill="#98A2B3" />
@@ -21,11 +21,11 @@ const IconMap = (type: string) => {
),
}
return icons[type] as any
return icons[type]
}
const InputTypeIcon: FC<IInputTypeIconProps> = ({
type
type,
}) => {
const Icon = IconMap(type)
return Icon
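Narrowing `type` to a string union is what lets the map above drop both `Record<string, ReactNode>` and the closing `as any`. The same pattern, reduced to strings:

type InputType = 'string' | 'select'

// Keys are checked against the union and lookups are statically safe,
// so no cast is needed on the way out
const labels: Record<InputType, string> = {
  string: 'text input',
  select: 'dropdown',
}

const label = (type: InputType): string => labels[type]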

View File

@@ -38,7 +38,7 @@ const Config: FC = () => {
setSpeechToTextConfig,
} = useContext(ConfigContext)
const isChatApp = mode === AppType.chat
const { currentProvider } = useProviderContext()
const { speech2textDefaultModel } = useProviderContext()
const promptTemplate = modelConfig.configs.prompt_template
const promptVariables = modelConfig.configs.prompt_variables
@@ -90,7 +90,7 @@ const Config: FC = () => {
},
})
const hasChatConfig = isChatApp && (featureConfig.openingStatement || featureConfig.suggestedQuestionsAfterAnswer || (featureConfig.speechToText && currentProvider?.provider_name === 'openai'))
const hasChatConfig = isChatApp && (featureConfig.openingStatement || featureConfig.suggestedQuestionsAfterAnswer || (featureConfig.speechToText && !!speech2textDefaultModel))
const hasToolbox = false
const [showAutomatic, { setTrue: showAutomaticTrue, setFalse: showAutomaticFalse }] = useBoolean(false)
@@ -120,7 +120,7 @@ const Config: FC = () => {
isChatApp={isChatApp}
config={featureConfig}
onChange={handleFeatureChange}
showSpeechToTextItem={currentProvider?.provider_name === 'openai'}
showSpeechToTextItem={!!speech2textDefaultModel}
/>
)}
{showAutomatic && (
@@ -160,7 +160,7 @@ const Config: FC = () => {
}
}
isShowSuggestedQuestionsAfterAnswer={featureConfig.suggestedQuestionsAfterAnswer}
isShowSpeechText={featureConfig.speechToText && currentProvider?.provider_name === 'openai'}
isShowSpeechText={featureConfig.speechToText && !!speech2textDefaultModel}
/>
)
}
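This diff and the Debug diff below swap a hard-coded provider check for the presence of a default speech-to-text model, so any provider that supplies one enables the feature. The gate reduced to its essentials (the model value is a made-up stand-in):

type Speech2TextModel = {
  model_name: string
}

// undefined when no provider supplies a speech-to-text default
const speech2textDefaultModel: Speech2TextModel | undefined = { model_name: 'whisper-1' }

// feature flags key off availability, not provider_name === 'openai'
const showSpeechToTextItem = !!speech2textDefaultModel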

View File

@@ -6,22 +6,17 @@ import { useTranslation } from 'react-i18next'
import TypeIcon from '../type-icon'
import RemoveIcon from '../../base/icons/remove-icon'
import s from './style.module.css'
import type { DataSet } from '@/models/datasets'
import { formatNumber } from '@/utils/format'
import Tooltip from '@/app/components/base/tooltip'
export type ICardItemProps = {
className?: string
config: any
config: DataSet
onRemove: (id: string) => void
readonly?: boolean
}
// const RemoveIcon = ({ className, onClick }: { className: string, onClick: () => void }) => (
// <svg className={className} onClick={onClick} width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
// <path d="M10 6H14M6 8H18M16.6667 8L16.1991 15.0129C16.129 16.065 16.0939 16.5911 15.8667 16.99C15.6666 17.3412 15.3648 17.6235 15.0011 17.7998C14.588 18 14.0607 18 13.0062 18H10.9938C9.93927 18 9.41202 18 8.99889 17.7998C8.63517 17.6235 8.33339 17.3412 8.13332 16.99C7.90607 16.5911 7.871 16.065 7.80086 15.0129L7.33333 8M10.6667 11V14.3333M13.3333 11V14.3333" stroke="#667085" strokeWidth="1.5" strokeLinecap="round" strokeLinejoin="round" />
// </svg>
// )
const CardItem: FC<ICardItemProps> = ({
className,
config,

View File

@@ -52,7 +52,7 @@ const Debug: FC<IDebug> = ({
modelConfig,
completionParams,
} = useContext(ConfigContext)
const { currentProvider } = useProviderContext()
const { speech2textDefaultModel } = useProviderContext()
const [chatList, setChatList, getChatList] = useGetState<IChatItem[]>([])
const chatListDomRef = useRef<HTMLDivElement>(null)
useEffect(() => {
@@ -390,7 +390,7 @@ const Debug: FC<IDebug> = ({
}}
isShowSuggestion={doShowSuggestion}
suggestionList={suggestQuestions}
isShowSpeechToText={speechToTextConfig.enabled && currentProvider?.provider_name === 'openai'}
isShowSpeechToText={speechToTextConfig.enabled && !!speech2textDefaultModel}
/>
</div>
</div>

View File

@@ -16,22 +16,21 @@ import ConfigModel from '@/app/components/app/configuration/config-model'
import Config from '@/app/components/app/configuration/config'
import Debug from '@/app/components/app/configuration/debug'
import Confirm from '@/app/components/base/confirm'
import { ProviderType } from '@/types/app'
import { ProviderEnum } from '@/app/components/header/account-setting/model-page/declarations'
import type { AppDetailResponse } from '@/models/app'
import { ToastContext } from '@/app/components/base/toast'
import { fetchTenantInfo } from '@/service/common'
import { fetchAppDetail, updateAppModelConfig } from '@/service/apps'
import { promptVariablesToUserInputsForm, userInputsFormToPromptVariables } from '@/utils/model-config'
import { fetchDatasets } from '@/service/datasets'
import AccountSetting from '@/app/components/header/account-setting'
import { useProviderContext } from '@/context/provider-context'
const Configuration: FC = () => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [hasFetchedDetail, setHasFetchedDetail] = useState(false)
const [hasFetchedKey, setHasFetchedKey] = useState(false)
const isLoading = !hasFetchedDetail || !hasFetchedKey
const isLoading = !hasFetchedDetail
const pathname = usePathname()
const matched = pathname.match(/\/app\/([^/]+)/)
const appId = (matched?.length && matched[1]) ? matched[1] : ''
@@ -68,7 +67,7 @@ const Configuration: FC = () => {
frequency_penalty: 1, // -2-2
})
const [modelConfig, doSetModelConfig] = useState<ModelConfig>({
provider: ProviderType.openai,
provider: ProviderEnum.openai,
model_id: 'gpt-3.5-turbo',
configs: {
prompt_template: '',
@@ -85,7 +84,7 @@ const Configuration: FC = () => {
doSetModelConfig(newModelConfig)
}
const setModelId = (modelId: string, provider: ProviderType) => {
const setModelId = (modelId: string, provider: ProviderEnum) => {
const newModelConfig = produce(modelConfig, (draft: any) => {
draft.provider = provider
draft.model_id = modelId
@@ -113,25 +112,27 @@ const Configuration: FC = () => {
})
}
const [hasSetCustomAPIKEY, setHasSetCustomerAPIKEY] = useState(true)
const [isTrailFinished, setIsTrailFinished] = useState(false)
const { textGenerationModelList } = useProviderContext()
const hasSetCustomAPIKEY = !!textGenerationModelList?.find(({ model_provider: provider }) => {
if (provider.provider_type === 'system' && provider.quota_type === 'paid')
return true
if (provider.provider_type === 'custom')
return true
return false
})
const isTrailFinished = !hasSetCustomAPIKEY && textGenerationModelList
.filter(({ model_provider: provider }) => provider.quota_type === 'trial')
.every(({ model_provider: provider }) => {
const { quota_used, quota_limit } = provider
return quota_used === quota_limit
})
const hasSetAPIKEY = hasSetCustomAPIKEY || !isTrailFinished
const [isShowSetAPIKey, { setTrue: showSetAPIKey, setFalse: hideSetAPIkey }] = useBoolean()
const checkAPIKey = async () => {
const { in_trail, trial_end_reason } = await fetchTenantInfo({ url: '/info' })
const isTrailFinished = in_trail && trial_end_reason === 'trial_exceeded'
const hasSetCustomAPIKEY = trial_end_reason === 'using_custom'
setHasSetCustomerAPIKEY(hasSetCustomAPIKEY)
setIsTrailFinished(isTrailFinished)
setHasFetchedKey(true)
}
useEffect(() => {
checkAPIKey()
}, [])
useEffect(() => {
(fetchAppDetail({ url: '/apps', id: appId }) as any).then(async (res: AppDetailResponse) => {
setMode(res.mode)
@@ -284,7 +285,7 @@ const Configuration: FC = () => {
{/* Model and Parameters */}
<ConfigModel
mode={mode}
provider={modelConfig.provider as ProviderType}
provider={modelConfig.provider as ProviderEnum}
completionParams={completionParams}
modelId={modelConfig.model_id}
setModelId={setModelId}
@@ -338,7 +339,6 @@ const Configuration: FC = () => {
)
}
{isShowSetAPIKey && <AccountSetting activeTab="provider" onCancel={async () => {
await checkAPIKey()
hideSetAPIkey()
}} />}
</>
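The rewrite above derives the API-key flags from textGenerationModelList instead of a separate /info round-trip. A self-contained restatement of the derivation with made-up data (field names follow the diff; 'Trail' is the codebase's own spelling of 'trial'):

type MockProvider = {
  provider_type: 'system' | 'custom'
  quota_type?: 'paid' | 'trial'
  quota_used?: number
  quota_limit?: number
}

const providers: MockProvider[] = [
  { provider_type: 'system', quota_type: 'trial', quota_used: 200, quota_limit: 200 },
]

// A custom key counts as set when a paid system quota or any custom provider exists
const hasSetCustomAPIKEY = providers.some(p =>
  (p.provider_type === 'system' && p.quota_type === 'paid') || p.provider_type === 'custom')

// The trial is finished when no key is set and every trial quota is exhausted
const isTrailFinished = !hasSetCustomAPIKEY && providers
  .filter(p => p.quota_type === 'trial')
  .every(p => p.quota_used === p.quota_limit)

console.log(hasSetCustomAPIKEY, isTrailFinished) // false true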

View File

@@ -13,6 +13,7 @@ import SettingsModal from './settings'
import EmbeddedModal from './embedded'
import CustomizeModal from './customize'
import style from './style.module.css'
import type { ConfigParams } from './settings'
import Tooltip from '@/app/components/base/tooltip'
import AppBasic from '@/app/components/app-sidebar/basic'
import { asyncRunSafe, randomString } from '@/utils'
@@ -31,9 +32,9 @@ export type IAppCardProps = {
appInfo: AppDetailResponse
cardType?: 'api' | 'webapp'
customBgColor?: string
onChangeStatus: (val: boolean) => Promise<any>
onSaveSiteConfig?: (params: any) => Promise<any>
onGenerateCode?: () => Promise<any>
onChangeStatus: (val: boolean) => Promise<void>
onSaveSiteConfig?: (params: ConfigParams) => Promise<void>
onGenerateCode?: () => Promise<void>
}
const EmbedIcon: FC<{ className?: string }> = ({ className = '' }) => {
@@ -193,7 +194,7 @@ function AppCard({
</div>
<div className={'pt-2 flex flex-row items-center'}>
{!isApp && <SecretKeyButton className='flex-shrink-0 !h-8 bg-white mr-2' textCls='!text-gray-700 font-medium' iconCls='stroke-[1.2px]' appId={appInfo.id} />}
{OPERATIONS_MAP[cardType].map((op: any) => {
{OPERATIONS_MAP[cardType].map((op) => {
const disabled
= op.opName === t('appOverview.overview.appInfo.settings.entry')
? false

View File

@@ -18,7 +18,7 @@ export type ISettingsModalProps = {
isShow: boolean
defaultValue?: string
onClose: () => void
onSave?: (params: ConfigParams) => Promise<any>
onSave?: (params: ConfigParams) => Promise<void>
}
export type ConfigParams = {
@@ -26,6 +26,10 @@ export type ConfigParams = {
description: string
default_language: string
prompt_public: boolean
copyright: string
privacy_policy: string
icon: string
icon_background: string
}
const LANGUAGE_MAP: Record<Language, string> = {
@@ -82,7 +86,7 @@ const SettingsModal: FC<ISettingsModalProps> = ({
}
const onChange = (field: string) => {
return (e: any) => {
return (e: React.ChangeEvent<HTMLInputElement | HTMLTextAreaElement>) => {
setInputInfo(item => ({ ...item, [field]: e.target.value }))
}
}
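The typed onChange above is a curried field setter: call it with a field name, get back a change handler for that field. A minimal standalone version (ConfigParams trimmed for the sketch):

import type React from 'react'
import { useState } from 'react'

type FormParams = {
  title: string
  description: string
}

const useConfigForm = (initial: FormParams) => {
  const [inputInfo, setInputInfo] = useState(initial)

  // onChange('title') yields a handler that writes e.target.value into that field
  const onChange = (field: keyof FormParams) =>
    (e: React.ChangeEvent<HTMLInputElement | HTMLTextAreaElement>) =>
      setInputInfo(item => ({ ...item, [field]: e.target.value }))

  return { inputInfo, onChange }
}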

View File

@@ -10,7 +10,7 @@ import { HashtagIcon } from '@heroicons/react/24/solid'
import { Markdown } from '@/app/components/base/markdown'
import Loading from '@/app/components/base/loading'
import Toast from '@/app/components/base/toast'
import type { Feedbacktype } from '@/app/components/app/chat'
import type { Feedbacktype } from '@/app/components/app/chat/type'
import { fetchMoreLikeThis, updateFeedback } from '@/service/share'
const MAX_DEPTH = 3
@@ -136,7 +136,7 @@ const GenerationItem: FC<IGenerationItemProps> = ({
}
const mainStyle = (() => {
const res: any = !isTop
const res: React.CSSProperties = !isTop
? {
background: depth % 2 === 0 ? 'linear-gradient(90.07deg, #F9FAFB 0.05%, rgba(249, 250, 251, 0) 99.93%)' : '#fff',
}

View File

@@ -7,6 +7,7 @@ export type IButtonProps = {
className?: string
disabled?: boolean
loading?: boolean
tabIndex?: number
children: React.ReactNode
onClick?: MouseEventHandler<HTMLDivElement>
}
@@ -18,6 +19,7 @@ const Button: FC<IButtonProps> = ({
className,
onClick,
loading = false,
tabIndex,
}) => {
let style = 'cursor-pointer'
switch (type) {
@@ -35,6 +37,7 @@ const Button: FC<IButtonProps> = ({
return (
<div
className={`inline-flex justify-center items-center content-center h-9 leading-5 rounded-lg px-4 py-2 text-base ${style} ${className && className}`}
tabIndex={tabIndex}
onClick={disabled ? undefined : onClick}
>
{children}
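Because this Button renders a div rather than a native button, it is skipped by Tab by default; threading tabIndex through is what restores keyboard focus. Hypothetical usage:

import React from 'react'
import Button from '@/app/components/base/button'

// tabIndex={0} puts the div-based Button back into the tab order
const SaveBar: React.FC = () => (
  <Button type='primary' tabIndex={0} onClick={() => console.log('saved')}>
    Save
  </Button>
)
export default SaveBar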

File diff suppressed because one or more lines are too long

(image preview omitted; new file, 11 KiB)

File diff suppressed because one or more lines are too long

(image preview omitted; new file, 7.1 KiB)

View File

@@ -0,0 +1,42 @@
<svg width="152" height="24" viewBox="0 0 152 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="xorbits 1" clip-path="url(#clip0_9866_6170)">
<path id="Vector" d="M8.00391 12.3124C8.69334 13.0754 9.47526 13.7494 10.3316 14.3188C11.0667 14.8105 11.8509 15.2245 12.6716 15.5541C14.1617 14.1465 15.3959 12.4907 16.3192 10.6606L21.7051 0L12.3133 7.38353C10.5832 8.74456 9.12178 10.416 8.00391 12.3124Z" fill="url(#paint0_linear_9866_6170)"/>
<path id="Vector_2" d="M7.23504 18.9512C6.56092 18.5012 5.92386 18.0265 5.3221 17.5394L2.06445 24L7.91975 19.3959C7.69034 19.2494 7.46092 19.103 7.23504 18.9512Z" fill="url(#paint1_linear_9866_6170)"/>
<path id="Vector_3" d="M19.3161 8.57474C21.0808 10.9147 21.5961 13.5159 20.3996 15.3053C18.6526 17.9189 13.9161 17.8183 9.82024 15.0812C5.72435 12.3441 3.82024 8.0065 5.56729 5.39297C6.76377 3.60356 9.36318 3.0865 12.2008 3.81886C7.29318 1.73474 2.62376 1.94121 0.813177 4.64474C-1.45976 8.04709 1.64435 14.1177 7.74494 18.1889C13.8455 22.26 20.6361 22.8124 22.9091 19.4118C24.7179 16.703 23.1173 12.3106 19.3161 8.57474Z" fill="url(#paint2_linear_9866_6170)"/>
<g id="Xorbits Inference">
<path d="M35.5162 12.142L38.5402 17H36.7482L34.5502 13.472L32.4922 17H30.7142L33.7382 12.142L30.7002 7.27002H32.4922L34.7042 10.826L36.7762 7.27002H38.5542L35.5162 12.142Z" fill="#1D2939"/>
<path d="M43.3584 17.126C42.6304 17.126 41.9724 16.9627 41.3844 16.636C40.7964 16.3 40.3344 15.8334 39.9984 15.236C39.6624 14.6294 39.4944 13.9293 39.4944 13.136C39.4944 12.352 39.6671 11.6567 40.0124 11.05C40.3577 10.4434 40.8291 9.97668 41.4264 9.65002C42.0237 9.32335 42.6911 9.16002 43.4284 9.16002C44.1657 9.16002 44.8331 9.32335 45.4304 9.65002C46.0277 9.97668 46.4991 10.4434 46.8444 11.05C47.1897 11.6567 47.3624 12.352 47.3624 13.136C47.3624 13.92 47.185 14.6154 46.8304 15.222C46.4757 15.8287 45.9904 16.3 45.3744 16.636C44.7677 16.9627 44.0957 17.126 43.3584 17.126ZM43.3584 15.74C43.769 15.74 44.1517 15.642 44.5064 15.446C44.8704 15.25 45.1644 14.956 45.3884 14.564C45.6124 14.172 45.7244 13.696 45.7244 13.136C45.7244 12.576 45.6171 12.1047 45.4024 11.722C45.1877 11.33 44.9031 11.036 44.5484 10.84C44.1937 10.644 43.8111 10.546 43.4004 10.546C42.9897 10.546 42.607 10.644 42.2524 10.84C41.9071 11.036 41.6317 11.33 41.4264 11.722C41.221 12.1047 41.1184 12.576 41.1184 13.136C41.1184 13.9667 41.3284 14.6107 41.7484 15.068C42.1777 15.516 42.7144 15.74 43.3584 15.74Z" fill="#1D2939"/>
<path d="M50.2561 10.406C50.4895 10.014 50.7974 9.71068 51.1801 9.49602C51.5721 9.27202 52.0341 9.16002 52.5661 9.16002V10.812H52.1601C51.5348 10.812 51.0588 10.9707 50.7321 11.288C50.4148 11.6054 50.2561 12.156 50.2561 12.94V17H48.6601V9.28602H50.2561V10.406Z" fill="#1D2939"/>
<path d="M55.3492 10.434C55.6198 10.0607 55.9885 9.75735 56.4552 9.52402C56.9312 9.28135 57.4585 9.16002 58.0372 9.16002C58.7185 9.16002 59.3345 9.32335 59.8852 9.65002C60.4358 9.97668 60.8698 10.4434 61.1872 11.05C61.5045 11.6473 61.6632 12.3333 61.6632 13.108C61.6632 13.8827 61.5045 14.578 61.1872 15.194C60.8698 15.8007 60.4312 16.2767 59.8712 16.622C59.3205 16.958 58.7092 17.126 58.0372 17.126C57.4398 17.126 56.9078 17.0093 56.4412 16.776C55.9838 16.5427 55.6198 16.244 55.3492 15.88V17H53.7532V6.64002H55.3492V10.434ZM60.0392 13.108C60.0392 12.576 59.9272 12.1187 59.7032 11.736C59.4885 11.344 59.1992 11.05 58.8352 10.854C58.4805 10.6487 58.0978 10.546 57.6872 10.546C57.2858 10.546 56.9032 10.6487 56.5392 10.854C56.1845 11.0594 55.8952 11.358 55.6712 11.75C55.4565 12.142 55.3492 12.604 55.3492 13.136C55.3492 13.668 55.4565 14.1347 55.6712 14.536C55.8952 14.928 56.1845 15.2267 56.5392 15.432C56.9032 15.6374 57.2858 15.74 57.6872 15.74C58.0978 15.74 58.4805 15.6374 58.8352 15.432C59.1992 15.2174 59.4885 14.9093 59.7032 14.508C59.9272 14.1067 60.0392 13.64 60.0392 13.108Z" fill="#1D2939"/>
<path d="M63.7734 8.26402C63.4841 8.26402 63.2414 8.16602 63.0454 7.97002C62.8494 7.77402 62.7514 7.53135 62.7514 7.24202C62.7514 6.95268 62.8494 6.71002 63.0454 6.51402C63.2414 6.31802 63.4841 6.22002 63.7734 6.22002C64.0534 6.22002 64.2914 6.31802 64.4874 6.51402C64.6834 6.71002 64.7814 6.95268 64.7814 7.24202C64.7814 7.53135 64.6834 7.77402 64.4874 7.97002C64.2914 8.16602 64.0534 8.26402 63.7734 8.26402ZM64.5574 9.28602V17H62.9614V9.28602H64.5574Z" fill="#1D2939"/>
<path d="M68.2348 10.588V14.858C68.2348 15.1474 68.3002 15.3573 68.4309 15.488C68.5709 15.6093 68.8042 15.67 69.1308 15.67H70.1109V17H68.8508C68.1322 17 67.5815 16.832 67.1988 16.496C66.8162 16.16 66.6248 15.614 66.6248 14.858V10.588H65.7148V9.28602H66.6248V7.36802H68.2348V9.28602H70.1109V10.588H68.2348Z" fill="#1D2939"/>
<path d="M74.1018 17.126C73.4952 17.126 72.9492 17.0187 72.4638 16.804C71.9878 16.58 71.6098 16.2813 71.3298 15.908C71.0498 15.5253 70.9005 15.1007 70.8818 14.634H72.5338C72.5618 14.9607 72.7158 15.236 72.9958 15.46C73.2852 15.6747 73.6445 15.782 74.0738 15.782C74.5218 15.782 74.8672 15.698 75.1098 15.53C75.3618 15.3527 75.4878 15.1287 75.4878 14.858C75.4878 14.5687 75.3478 14.354 75.0678 14.214C74.7972 14.074 74.3632 13.92 73.7658 13.752C73.1872 13.5933 72.7158 13.4394 72.3518 13.29C71.9878 13.1407 71.6705 12.912 71.3998 12.604C71.1385 12.296 71.0078 11.89 71.0078 11.386C71.0078 10.9753 71.1292 10.602 71.3718 10.266C71.6145 9.92068 71.9598 9.65002 72.4078 9.45402C72.8652 9.25802 73.3878 9.16002 73.9758 9.16002C74.8532 9.16002 75.5578 9.38402 76.0898 9.83202C76.6312 10.2707 76.9205 10.8727 76.9578 11.638H75.3618C75.3338 11.2927 75.1938 11.0173 74.9418 10.812C74.6898 10.6067 74.3492 10.504 73.9198 10.504C73.4998 10.504 73.1778 10.5833 72.9538 10.742C72.7298 10.9007 72.6178 11.1107 72.6178 11.372C72.6178 11.5773 72.6925 11.75 72.8418 11.89C72.9912 12.03 73.1732 12.142 73.3878 12.226C73.6025 12.3007 73.9198 12.3987 74.3398 12.52C74.8998 12.6693 75.3572 12.8233 75.7118 12.982C76.0758 13.1314 76.3885 13.3554 76.6498 13.654C76.9112 13.9527 77.0465 14.3493 77.0558 14.844C77.0558 15.2827 76.9345 15.6747 76.6918 16.02C76.4492 16.3654 76.1038 16.636 75.6558 16.832C75.2172 17.028 74.6992 17.126 74.1018 17.126Z" fill="#1D2939"/>
<path d="M83.4531 7.27002V17H81.8571V7.27002H83.4531Z" fill="#1D2939"/>
<path d="M89.1605 9.16002C89.7671 9.16002 90.3085 9.28602 90.7845 9.53802C91.2698 9.79002 91.6478 10.1633 91.9185 10.658C92.1891 11.1527 92.3245 11.75 92.3245 12.45V17H90.7425V12.688C90.7425 11.9973 90.5698 11.47 90.2245 11.106C89.8791 10.7327 89.4078 10.546 88.8105 10.546C88.2131 10.546 87.7371 10.7327 87.3825 11.106C87.0371 11.47 86.8645 11.9973 86.8645 12.688V17H85.2685V9.28602H86.8645V10.168C87.1258 9.85068 87.4571 9.60335 87.8585 9.42602C88.2691 9.24868 88.7031 9.16002 89.1605 9.16002Z" fill="#1D2939"/>
<path d="M97.3143 10.588H95.8863V17H94.2763V10.588H93.3663V9.28602H94.2763V8.74002C94.2763 7.85335 94.5096 7.20935 94.9763 6.80802C95.4523 6.39735 96.1943 6.19202 97.2023 6.19202V7.52202C96.7169 7.52202 96.3763 7.61535 96.1803 7.80202C95.9843 7.97935 95.8863 8.29202 95.8863 8.74002V9.28602H97.3143V10.588Z" fill="#1D2939"/>
<path d="M105.519 12.954C105.519 13.2433 105.5 13.5047 105.463 13.738H99.5687C99.6154 14.354 99.844 14.8487 100.255 15.222C100.665 15.5954 101.169 15.782 101.767 15.782C102.625 15.782 103.232 15.4227 103.587 14.704H105.309C105.075 15.4133 104.651 15.9967 104.035 16.454C103.428 16.902 102.672 17.126 101.767 17.126C101.029 17.126 100.367 16.9627 99.7787 16.636C99.2 16.3 98.7427 15.8334 98.4067 15.236C98.08 14.6294 97.9167 13.9293 97.9167 13.136C97.9167 12.3427 98.0754 11.6473 98.3927 11.05C98.7194 10.4434 99.172 9.97668 99.7507 9.65002C100.339 9.32335 101.011 9.16002 101.767 9.16002C102.495 9.16002 103.143 9.31868 103.713 9.63602C104.282 9.95335 104.725 10.4014 105.043 10.98C105.36 11.5493 105.519 12.2073 105.519 12.954ZM103.853 12.45C103.843 11.862 103.633 11.3907 103.223 11.036C102.812 10.6813 102.303 10.504 101.697 10.504C101.146 10.504 100.675 10.6813 100.283 11.036C99.8907 11.3813 99.6574 11.8527 99.5827 12.45H103.853Z" fill="#1D2939"/>
<path d="M108.405 10.406C108.639 10.014 108.947 9.71068 109.329 9.49602C109.721 9.27202 110.183 9.16002 110.715 9.16002V10.812H110.309C109.684 10.812 109.208 10.9707 108.881 11.288C108.564 11.6054 108.405 12.156 108.405 12.94V17H106.809V9.28602H108.405V10.406Z" fill="#1D2939"/>
<path d="M118.972 12.954C118.972 13.2433 118.954 13.5047 118.916 13.738H113.022C113.069 14.354 113.298 14.8487 113.708 15.222C114.119 15.5954 114.623 15.782 115.22 15.782C116.079 15.782 116.686 15.4227 117.04 14.704H118.762C118.529 15.4133 118.104 15.9967 117.488 16.454C116.882 16.902 116.126 17.126 115.22 17.126C114.483 17.126 113.82 16.9627 113.232 16.636C112.654 16.3 112.196 15.8334 111.86 15.236C111.534 14.6294 111.37 13.9293 111.37 13.136C111.37 12.3427 111.529 11.6473 111.846 11.05C112.173 10.4434 112.626 9.97668 113.204 9.65002C113.792 9.32335 114.464 9.16002 115.22 9.16002C115.948 9.16002 116.597 9.31868 117.166 9.63602C117.736 9.95335 118.179 10.4014 118.496 10.98C118.814 11.5493 118.972 12.2073 118.972 12.954ZM117.306 12.45C117.297 11.862 117.087 11.3907 116.676 11.036C116.266 10.6813 115.757 10.504 115.15 10.504C114.6 10.504 114.128 10.6813 113.736 11.036C113.344 11.3813 113.111 11.8527 113.036 12.45H117.306Z" fill="#1D2939"/>
<path d="M124.155 9.16002C124.762 9.16002 125.303 9.28602 125.779 9.53802C126.264 9.79002 126.642 10.1633 126.913 10.658C127.184 11.1527 127.319 11.75 127.319 12.45V17H125.737V12.688C125.737 11.9973 125.564 11.47 125.219 11.106C124.874 10.7327 124.402 10.546 123.805 10.546C123.208 10.546 122.732 10.7327 122.377 11.106C122.032 11.47 121.859 11.9973 121.859 12.688V17H120.263V9.28602H121.859V10.168C122.12 9.85068 122.452 9.60335 122.853 9.42602C123.264 9.24868 123.698 9.16002 124.155 9.16002Z" fill="#1D2939"/>
<path d="M128.543 13.136C128.543 12.3427 128.701 11.6473 129.019 11.05C129.345 10.4434 129.793 9.97668 130.363 9.65002C130.932 9.32335 131.585 9.16002 132.323 9.16002C133.256 9.16002 134.026 9.38402 134.633 9.83202C135.249 10.2707 135.664 10.9007 135.879 11.722H134.157C134.017 11.3394 133.793 11.0407 133.485 10.826C133.177 10.6113 132.789 10.504 132.323 10.504C131.669 10.504 131.147 10.7373 130.755 11.204C130.372 11.6613 130.181 12.3053 130.181 13.136C130.181 13.9667 130.372 14.6153 130.755 15.082C131.147 15.5487 131.669 15.782 132.323 15.782C133.247 15.782 133.858 15.376 134.157 14.564H135.879C135.655 15.348 135.235 15.9733 134.619 16.44C134.003 16.8973 133.237 17.126 132.323 17.126C131.585 17.126 130.932 16.9627 130.363 16.636C129.793 16.3 129.345 15.8334 129.019 15.236C128.701 14.6294 128.543 13.9293 128.543 13.136Z" fill="#1D2939"/>
<path d="M144.259 12.954C144.259 13.2433 144.241 13.5047 144.203 13.738H138.309C138.356 14.354 138.585 14.8487 138.995 15.222C139.406 15.5954 139.91 15.782 140.507 15.782C141.366 15.782 141.973 15.4227 142.327 14.704H144.049C143.816 15.4133 143.391 15.9967 142.775 16.454C142.169 16.902 141.413 17.126 140.507 17.126C139.77 17.126 139.107 16.9627 138.519 16.636C137.941 16.3 137.483 15.8334 137.147 15.236C136.821 14.6294 136.657 13.9293 136.657 13.136C136.657 12.3427 136.816 11.6473 137.133 11.05C137.46 10.4434 137.913 9.97668 138.491 9.65002C139.079 9.32335 139.751 9.16002 140.507 9.16002C141.235 9.16002 141.884 9.31868 142.453 9.63602C143.023 9.95335 143.466 10.4014 143.783 10.98C144.101 11.5493 144.259 12.2073 144.259 12.954ZM142.593 12.45C142.584 11.862 142.374 11.3907 141.963 11.036C141.553 10.6813 141.044 10.504 140.437 10.504C139.887 10.504 139.415 10.6813 139.023 11.036C138.631 11.3813 138.398 11.8527 138.323 12.45H142.593Z" fill="#1D2939"/>
</g>
</g>
<defs>
<linearGradient id="paint0_linear_9866_6170" x1="2.15214" y1="24.3018" x2="21.2921" y2="0.0988218" gradientUnits="userSpaceOnUse">
<stop stop-color="#E9A85E"/>
<stop offset="1" stop-color="#F52B76"/>
</linearGradient>
<linearGradient id="paint1_linear_9866_6170" x1="2.06269" y1="24.2294" x2="21.2027" y2="0.028252" gradientUnits="userSpaceOnUse">
<stop stop-color="#E9A85E"/>
<stop offset="1" stop-color="#F52B76"/>
</linearGradient>
<linearGradient id="paint2_linear_9866_6170" x1="-0.613606" y1="3.843" x2="21.4449" y2="18.7258" gradientUnits="userSpaceOnUse">
<stop stop-color="#6A0CF5"/>
<stop offset="1" stop-color="#AB66F3"/>
</linearGradient>
<clipPath id="clip0_9866_6170">
<rect width="152" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

(image preview omitted; new file, 12 KiB)

View File

@@ -0,0 +1,24 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g id="Xorbits Square" clip-path="url(#clip0_9850_26870)">
<path id="Vector" d="M8.00391 12.3124C8.69334 13.0754 9.47526 13.7494 10.3316 14.3188C11.0667 14.8105 11.8509 15.2245 12.6716 15.5541C14.1617 14.1465 15.3959 12.4907 16.3192 10.6606L21.7051 0L12.3133 7.38353C10.5832 8.74456 9.12178 10.416 8.00391 12.3124Z" fill="url(#paint0_linear_9850_26870)"/>
<path id="Vector_2" d="M7.23504 18.9512C6.56092 18.5012 5.92386 18.0265 5.3221 17.5394L2.06445 24L7.91975 19.3959C7.69034 19.2494 7.46092 19.103 7.23504 18.9512Z" fill="url(#paint1_linear_9850_26870)"/>
<path id="Vector_3" d="M19.3161 8.57474C21.0808 10.9147 21.5961 13.5159 20.3996 15.3053C18.6526 17.9189 13.9161 17.8183 9.82024 15.0812C5.72435 12.3441 3.82024 8.0065 5.56729 5.39297C6.76377 3.60356 9.36318 3.0865 12.2008 3.81886C7.29318 1.73474 2.62376 1.94121 0.813177 4.64474C-1.45976 8.04709 1.64435 14.1177 7.74494 18.1889C13.8455 22.26 20.6361 22.8124 22.9091 19.4118C24.7179 16.703 23.1173 12.3106 19.3161 8.57474Z" fill="url(#paint2_linear_9850_26870)"/>
</g>
<defs>
<linearGradient id="paint0_linear_9850_26870" x1="2.15214" y1="24.3018" x2="21.2921" y2="0.0988218" gradientUnits="userSpaceOnUse">
<stop stop-color="#E9A85E"/>
<stop offset="1" stop-color="#F52B76"/>
</linearGradient>
<linearGradient id="paint1_linear_9850_26870" x1="2.06269" y1="24.2294" x2="21.2027" y2="0.028252" gradientUnits="userSpaceOnUse">
<stop stop-color="#E9A85E"/>
<stop offset="1" stop-color="#F52B76"/>
</linearGradient>
<linearGradient id="paint2_linear_9850_26870" x1="-0.613606" y1="3.843" x2="21.4449" y2="18.7258" gradientUnits="userSpaceOnUse">
<stop stop-color="#6A0CF5"/>
<stop offset="1" stop-color="#AB66F3"/>
</linearGradient>
<clipPath id="clip0_9850_26870">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

(image preview omitted; new file, 1.8 KiB)

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,14 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './Openllm.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
export default Icon
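Each generated wrapper simply feeds its JSON-ified SVG into the shared IconBase, so consumers treat it like any other component. A usage sketch (the import path and host component are illustrative):

import React from 'react'
import Openllm from '@/app/components/base/icons/src/public/llm/Openllm'

// Generated icons accept normal props, e.g. a sizing class
const ProviderBadge: React.FC = () => <Openllm className='w-4 h-4' />
export default ProviderBadge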

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,14 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './OpenllmText.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
export default Icon

View File

@@ -0,0 +1,176 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "24",
"height": "24",
"viewBox": "0 0 24 24",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "Xorbits Square",
"clip-path": "url(#clip0_9850_26870)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector",
"d": "M8.00391 12.3124C8.69334 13.0754 9.47526 13.7494 10.3316 14.3188C11.0667 14.8105 11.8509 15.2245 12.6716 15.5541C14.1617 14.1465 15.3959 12.4907 16.3192 10.6606L21.7051 0L12.3133 7.38353C10.5832 8.74456 9.12178 10.416 8.00391 12.3124Z",
"fill": "url(#paint0_linear_9850_26870)"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector_2",
"d": "M7.23504 18.9512C6.56092 18.5012 5.92386 18.0265 5.3221 17.5394L2.06445 24L7.91975 19.3959C7.69034 19.2494 7.46092 19.103 7.23504 18.9512Z",
"fill": "url(#paint1_linear_9850_26870)"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector_3",
"d": "M19.3161 8.57474C21.0808 10.9147 21.5961 13.5159 20.3996 15.3053C18.6526 17.9189 13.9161 17.8183 9.82024 15.0812C5.72435 12.3441 3.82024 8.0065 5.56729 5.39297C6.76377 3.60356 9.36318 3.0865 12.2008 3.81886C7.29318 1.73474 2.62376 1.94121 0.813177 4.64474C-1.45976 8.04709 1.64435 14.1177 7.74494 18.1889C13.8455 22.26 20.6361 22.8124 22.9091 19.4118C24.7179 16.703 23.1173 12.3106 19.3161 8.57474Z",
"fill": "url(#paint2_linear_9850_26870)"
},
"children": []
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint0_linear_9850_26870",
"x1": "2.15214",
"y1": "24.3018",
"x2": "21.2921",
"y2": "0.0988218",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#E9A85E"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#F52B76"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint1_linear_9850_26870",
"x1": "2.06269",
"y1": "24.2294",
"x2": "21.2027",
"y2": "0.028252",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#E9A85E"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#F52B76"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint2_linear_9850_26870",
"x1": "-0.613606",
"y1": "3.843",
"x2": "21.4449",
"y2": "18.7258",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#6A0CF5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#AB66F3"
},
"children": []
}
]
},
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_9850_26870"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "24",
"height": "24",
"fill": "white"
},
"children": []
}
]
}
]
}
]
},
"name": "XorbitsInference"
}

View File

@@ -0,0 +1,14 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './XorbitsInference.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
export default Icon
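
IconBase itself is not shown in this compare view, so as a rough sketch only: rendering JSON like the `XorbitsInference` tree above amounts to a recursive walk that maps each node to a React element. The names and types below are illustrative, not Dify's actual implementation:

```tsx
import * as React from 'react'

// Shape of the generated JSON shown above (illustrative subset; the real
// data also carries fields such as `isRootNode` on the svg element).
type IconNode = {
  type: 'element'
  name: string
  attributes: Record<string, string>
  children: IconNode[]
}

// Recursively map a JSON node to a React element. Attribute names arrive in
// SVG's kebab-case (e.g. "clip-path", "stop-color"); React accepts these on
// SVG elements, so they can be spread through as-is.
function renderNode(node: IconNode, key?: React.Key): React.ReactElement {
  return React.createElement(
    node.name,
    { ...node.attributes, key },
    node.children.map((child, i) => renderNode(child, i)),
  )
}

// The root node is the <svg> element itself, carrying width/height/viewBox,
// so rendering the whole icon is a single call on data.icon.
```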

View File

@@ -0,0 +1,329 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "152",
"height": "24",
"viewBox": "0 0 152 24",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "g",
"attributes": {
"id": "xorbits 1",
"clip-path": "url(#clip0_9866_6170)"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector",
"d": "M8.00391 12.3124C8.69334 13.0754 9.47526 13.7494 10.3316 14.3188C11.0667 14.8105 11.8509 15.2245 12.6716 15.5541C14.1617 14.1465 15.3959 12.4907 16.3192 10.6606L21.7051 0L12.3133 7.38353C10.5832 8.74456 9.12178 10.416 8.00391 12.3124Z",
"fill": "url(#paint0_linear_9866_6170)"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector_2",
"d": "M7.23504 18.9512C6.56092 18.5012 5.92386 18.0265 5.3221 17.5394L2.06445 24L7.91975 19.3959C7.69034 19.2494 7.46092 19.103 7.23504 18.9512Z",
"fill": "url(#paint1_linear_9866_6170)"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"id": "Vector_3",
"d": "M19.3161 8.57474C21.0808 10.9147 21.5961 13.5159 20.3996 15.3053C18.6526 17.9189 13.9161 17.8183 9.82024 15.0812C5.72435 12.3441 3.82024 8.0065 5.56729 5.39297C6.76377 3.60356 9.36318 3.0865 12.2008 3.81886C7.29318 1.73474 2.62376 1.94121 0.813177 4.64474C-1.45976 8.04709 1.64435 14.1177 7.74494 18.1889C13.8455 22.26 20.6361 22.8124 22.9091 19.4118C24.7179 16.703 23.1173 12.3106 19.3161 8.57474Z",
"fill": "url(#paint2_linear_9866_6170)"
},
"children": []
},
{
"type": "element",
"name": "g",
"attributes": {
"id": "Xorbits Inference"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M35.5162 12.142L38.5402 17H36.7482L34.5502 13.472L32.4922 17H30.7142L33.7382 12.142L30.7002 7.27002H32.4922L34.7042 10.826L36.7762 7.27002H38.5542L35.5162 12.142Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M43.3584 17.126C42.6304 17.126 41.9724 16.9627 41.3844 16.636C40.7964 16.3 40.3344 15.8334 39.9984 15.236C39.6624 14.6294 39.4944 13.9293 39.4944 13.136C39.4944 12.352 39.6671 11.6567 40.0124 11.05C40.3577 10.4434 40.8291 9.97668 41.4264 9.65002C42.0237 9.32335 42.6911 9.16002 43.4284 9.16002C44.1657 9.16002 44.8331 9.32335 45.4304 9.65002C46.0277 9.97668 46.4991 10.4434 46.8444 11.05C47.1897 11.6567 47.3624 12.352 47.3624 13.136C47.3624 13.92 47.185 14.6154 46.8304 15.222C46.4757 15.8287 45.9904 16.3 45.3744 16.636C44.7677 16.9627 44.0957 17.126 43.3584 17.126ZM43.3584 15.74C43.769 15.74 44.1517 15.642 44.5064 15.446C44.8704 15.25 45.1644 14.956 45.3884 14.564C45.6124 14.172 45.7244 13.696 45.7244 13.136C45.7244 12.576 45.6171 12.1047 45.4024 11.722C45.1877 11.33 44.9031 11.036 44.5484 10.84C44.1937 10.644 43.8111 10.546 43.4004 10.546C42.9897 10.546 42.607 10.644 42.2524 10.84C41.9071 11.036 41.6317 11.33 41.4264 11.722C41.221 12.1047 41.1184 12.576 41.1184 13.136C41.1184 13.9667 41.3284 14.6107 41.7484 15.068C42.1777 15.516 42.7144 15.74 43.3584 15.74Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M50.2561 10.406C50.4895 10.014 50.7974 9.71068 51.1801 9.49602C51.5721 9.27202 52.0341 9.16002 52.5661 9.16002V10.812H52.1601C51.5348 10.812 51.0588 10.9707 50.7321 11.288C50.4148 11.6054 50.2561 12.156 50.2561 12.94V17H48.6601V9.28602H50.2561V10.406Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M55.3492 10.434C55.6198 10.0607 55.9885 9.75735 56.4552 9.52402C56.9312 9.28135 57.4585 9.16002 58.0372 9.16002C58.7185 9.16002 59.3345 9.32335 59.8852 9.65002C60.4358 9.97668 60.8698 10.4434 61.1872 11.05C61.5045 11.6473 61.6632 12.3333 61.6632 13.108C61.6632 13.8827 61.5045 14.578 61.1872 15.194C60.8698 15.8007 60.4312 16.2767 59.8712 16.622C59.3205 16.958 58.7092 17.126 58.0372 17.126C57.4398 17.126 56.9078 17.0093 56.4412 16.776C55.9838 16.5427 55.6198 16.244 55.3492 15.88V17H53.7532V6.64002H55.3492V10.434ZM60.0392 13.108C60.0392 12.576 59.9272 12.1187 59.7032 11.736C59.4885 11.344 59.1992 11.05 58.8352 10.854C58.4805 10.6487 58.0978 10.546 57.6872 10.546C57.2858 10.546 56.9032 10.6487 56.5392 10.854C56.1845 11.0594 55.8952 11.358 55.6712 11.75C55.4565 12.142 55.3492 12.604 55.3492 13.136C55.3492 13.668 55.4565 14.1347 55.6712 14.536C55.8952 14.928 56.1845 15.2267 56.5392 15.432C56.9032 15.6374 57.2858 15.74 57.6872 15.74C58.0978 15.74 58.4805 15.6374 58.8352 15.432C59.1992 15.2174 59.4885 14.9093 59.7032 14.508C59.9272 14.1067 60.0392 13.64 60.0392 13.108Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M63.7734 8.26402C63.4841 8.26402 63.2414 8.16602 63.0454 7.97002C62.8494 7.77402 62.7514 7.53135 62.7514 7.24202C62.7514 6.95268 62.8494 6.71002 63.0454 6.51402C63.2414 6.31802 63.4841 6.22002 63.7734 6.22002C64.0534 6.22002 64.2914 6.31802 64.4874 6.51402C64.6834 6.71002 64.7814 6.95268 64.7814 7.24202C64.7814 7.53135 64.6834 7.77402 64.4874 7.97002C64.2914 8.16602 64.0534 8.26402 63.7734 8.26402ZM64.5574 9.28602V17H62.9614V9.28602H64.5574Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M68.2348 10.588V14.858C68.2348 15.1474 68.3002 15.3573 68.4309 15.488C68.5709 15.6093 68.8042 15.67 69.1308 15.67H70.1109V17H68.8508C68.1322 17 67.5815 16.832 67.1988 16.496C66.8162 16.16 66.6248 15.614 66.6248 14.858V10.588H65.7148V9.28602H66.6248V7.36802H68.2348V9.28602H70.1109V10.588H68.2348Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M74.1018 17.126C73.4952 17.126 72.9492 17.0187 72.4638 16.804C71.9878 16.58 71.6098 16.2813 71.3298 15.908C71.0498 15.5253 70.9005 15.1007 70.8818 14.634H72.5338C72.5618 14.9607 72.7158 15.236 72.9958 15.46C73.2852 15.6747 73.6445 15.782 74.0738 15.782C74.5218 15.782 74.8672 15.698 75.1098 15.53C75.3618 15.3527 75.4878 15.1287 75.4878 14.858C75.4878 14.5687 75.3478 14.354 75.0678 14.214C74.7972 14.074 74.3632 13.92 73.7658 13.752C73.1872 13.5933 72.7158 13.4394 72.3518 13.29C71.9878 13.1407 71.6705 12.912 71.3998 12.604C71.1385 12.296 71.0078 11.89 71.0078 11.386C71.0078 10.9753 71.1292 10.602 71.3718 10.266C71.6145 9.92068 71.9598 9.65002 72.4078 9.45402C72.8652 9.25802 73.3878 9.16002 73.9758 9.16002C74.8532 9.16002 75.5578 9.38402 76.0898 9.83202C76.6312 10.2707 76.9205 10.8727 76.9578 11.638H75.3618C75.3338 11.2927 75.1938 11.0173 74.9418 10.812C74.6898 10.6067 74.3492 10.504 73.9198 10.504C73.4998 10.504 73.1778 10.5833 72.9538 10.742C72.7298 10.9007 72.6178 11.1107 72.6178 11.372C72.6178 11.5773 72.6925 11.75 72.8418 11.89C72.9912 12.03 73.1732 12.142 73.3878 12.226C73.6025 12.3007 73.9198 12.3987 74.3398 12.52C74.8998 12.6693 75.3572 12.8233 75.7118 12.982C76.0758 13.1314 76.3885 13.3554 76.6498 13.654C76.9112 13.9527 77.0465 14.3493 77.0558 14.844C77.0558 15.2827 76.9345 15.6747 76.6918 16.02C76.4492 16.3654 76.1038 16.636 75.6558 16.832C75.2172 17.028 74.6992 17.126 74.1018 17.126Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M83.4531 7.27002V17H81.8571V7.27002H83.4531Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M89.1605 9.16002C89.7671 9.16002 90.3085 9.28602 90.7845 9.53802C91.2698 9.79002 91.6478 10.1633 91.9185 10.658C92.1891 11.1527 92.3245 11.75 92.3245 12.45V17H90.7425V12.688C90.7425 11.9973 90.5698 11.47 90.2245 11.106C89.8791 10.7327 89.4078 10.546 88.8105 10.546C88.2131 10.546 87.7371 10.7327 87.3825 11.106C87.0371 11.47 86.8645 11.9973 86.8645 12.688V17H85.2685V9.28602H86.8645V10.168C87.1258 9.85068 87.4571 9.60335 87.8585 9.42602C88.2691 9.24868 88.7031 9.16002 89.1605 9.16002Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M97.3143 10.588H95.8863V17H94.2763V10.588H93.3663V9.28602H94.2763V8.74002C94.2763 7.85335 94.5096 7.20935 94.9763 6.80802C95.4523 6.39735 96.1943 6.19202 97.2023 6.19202V7.52202C96.7169 7.52202 96.3763 7.61535 96.1803 7.80202C95.9843 7.97935 95.8863 8.29202 95.8863 8.74002V9.28602H97.3143V10.588Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M105.519 12.954C105.519 13.2433 105.5 13.5047 105.463 13.738H99.5687C99.6154 14.354 99.844 14.8487 100.255 15.222C100.665 15.5954 101.169 15.782 101.767 15.782C102.625 15.782 103.232 15.4227 103.587 14.704H105.309C105.075 15.4133 104.651 15.9967 104.035 16.454C103.428 16.902 102.672 17.126 101.767 17.126C101.029 17.126 100.367 16.9627 99.7787 16.636C99.2 16.3 98.7427 15.8334 98.4067 15.236C98.08 14.6294 97.9167 13.9293 97.9167 13.136C97.9167 12.3427 98.0754 11.6473 98.3927 11.05C98.7194 10.4434 99.172 9.97668 99.7507 9.65002C100.339 9.32335 101.011 9.16002 101.767 9.16002C102.495 9.16002 103.143 9.31868 103.713 9.63602C104.282 9.95335 104.725 10.4014 105.043 10.98C105.36 11.5493 105.519 12.2073 105.519 12.954ZM103.853 12.45C103.843 11.862 103.633 11.3907 103.223 11.036C102.812 10.6813 102.303 10.504 101.697 10.504C101.146 10.504 100.675 10.6813 100.283 11.036C99.8907 11.3813 99.6574 11.8527 99.5827 12.45H103.853Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M108.405 10.406C108.639 10.014 108.947 9.71068 109.329 9.49602C109.721 9.27202 110.183 9.16002 110.715 9.16002V10.812H110.309C109.684 10.812 109.208 10.9707 108.881 11.288C108.564 11.6054 108.405 12.156 108.405 12.94V17H106.809V9.28602H108.405V10.406Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M118.972 12.954C118.972 13.2433 118.954 13.5047 118.916 13.738H113.022C113.069 14.354 113.298 14.8487 113.708 15.222C114.119 15.5954 114.623 15.782 115.22 15.782C116.079 15.782 116.686 15.4227 117.04 14.704H118.762C118.529 15.4133 118.104 15.9967 117.488 16.454C116.882 16.902 116.126 17.126 115.22 17.126C114.483 17.126 113.82 16.9627 113.232 16.636C112.654 16.3 112.196 15.8334 111.86 15.236C111.534 14.6294 111.37 13.9293 111.37 13.136C111.37 12.3427 111.529 11.6473 111.846 11.05C112.173 10.4434 112.626 9.97668 113.204 9.65002C113.792 9.32335 114.464 9.16002 115.22 9.16002C115.948 9.16002 116.597 9.31868 117.166 9.63602C117.736 9.95335 118.179 10.4014 118.496 10.98C118.814 11.5493 118.972 12.2073 118.972 12.954ZM117.306 12.45C117.297 11.862 117.087 11.3907 116.676 11.036C116.266 10.6813 115.757 10.504 115.15 10.504C114.6 10.504 114.128 10.6813 113.736 11.036C113.344 11.3813 113.111 11.8527 113.036 12.45H117.306Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M124.155 9.16002C124.762 9.16002 125.303 9.28602 125.779 9.53802C126.264 9.79002 126.642 10.1633 126.913 10.658C127.184 11.1527 127.319 11.75 127.319 12.45V17H125.737V12.688C125.737 11.9973 125.564 11.47 125.219 11.106C124.874 10.7327 124.402 10.546 123.805 10.546C123.208 10.546 122.732 10.7327 122.377 11.106C122.032 11.47 121.859 11.9973 121.859 12.688V17H120.263V9.28602H121.859V10.168C122.12 9.85068 122.452 9.60335 122.853 9.42602C123.264 9.24868 123.698 9.16002 124.155 9.16002Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M128.543 13.136C128.543 12.3427 128.701 11.6473 129.019 11.05C129.345 10.4434 129.793 9.97668 130.363 9.65002C130.932 9.32335 131.585 9.16002 132.323 9.16002C133.256 9.16002 134.026 9.38402 134.633 9.83202C135.249 10.2707 135.664 10.9007 135.879 11.722H134.157C134.017 11.3394 133.793 11.0407 133.485 10.826C133.177 10.6113 132.789 10.504 132.323 10.504C131.669 10.504 131.147 10.7373 130.755 11.204C130.372 11.6613 130.181 12.3053 130.181 13.136C130.181 13.9667 130.372 14.6153 130.755 15.082C131.147 15.5487 131.669 15.782 132.323 15.782C133.247 15.782 133.858 15.376 134.157 14.564H135.879C135.655 15.348 135.235 15.9733 134.619 16.44C134.003 16.8973 133.237 17.126 132.323 17.126C131.585 17.126 130.932 16.9627 130.363 16.636C129.793 16.3 129.345 15.8334 129.019 15.236C128.701 14.6294 128.543 13.9293 128.543 13.136Z",
"fill": "#1D2939"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M144.259 12.954C144.259 13.2433 144.241 13.5047 144.203 13.738H138.309C138.356 14.354 138.585 14.8487 138.995 15.222C139.406 15.5954 139.91 15.782 140.507 15.782C141.366 15.782 141.973 15.4227 142.327 14.704H144.049C143.816 15.4133 143.391 15.9967 142.775 16.454C142.169 16.902 141.413 17.126 140.507 17.126C139.77 17.126 139.107 16.9627 138.519 16.636C137.941 16.3 137.483 15.8334 137.147 15.236C136.821 14.6294 136.657 13.9293 136.657 13.136C136.657 12.3427 136.816 11.6473 137.133 11.05C137.46 10.4434 137.913 9.97668 138.491 9.65002C139.079 9.32335 139.751 9.16002 140.507 9.16002C141.235 9.16002 141.884 9.31868 142.453 9.63602C143.023 9.95335 143.466 10.4014 143.783 10.98C144.101 11.5493 144.259 12.2073 144.259 12.954ZM142.593 12.45C142.584 11.862 142.374 11.3907 141.963 11.036C141.553 10.6813 141.044 10.504 140.437 10.504C139.887 10.504 139.415 10.6813 139.023 11.036C138.631 11.3813 138.398 11.8527 138.323 12.45H142.593Z",
"fill": "#1D2939"
},
"children": []
}
]
}
]
},
{
"type": "element",
"name": "defs",
"attributes": {},
"children": [
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint0_linear_9866_6170",
"x1": "2.15214",
"y1": "24.3018",
"x2": "21.2921",
"y2": "0.0988218",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#E9A85E"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#F52B76"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint1_linear_9866_6170",
"x1": "2.06269",
"y1": "24.2294",
"x2": "21.2027",
"y2": "0.028252",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#E9A85E"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#F52B76"
},
"children": []
}
]
},
{
"type": "element",
"name": "linearGradient",
"attributes": {
"id": "paint2_linear_9866_6170",
"x1": "-0.613606",
"y1": "3.843",
"x2": "21.4449",
"y2": "18.7258",
"gradientUnits": "userSpaceOnUse"
},
"children": [
{
"type": "element",
"name": "stop",
"attributes": {
"stop-color": "#6A0CF5"
},
"children": []
},
{
"type": "element",
"name": "stop",
"attributes": {
"offset": "1",
"stop-color": "#AB66F3"
},
"children": []
}
]
},
{
"type": "element",
"name": "clipPath",
"attributes": {
"id": "clip0_9866_6170"
},
"children": [
{
"type": "element",
"name": "rect",
"attributes": {
"width": "152",
"height": "24",
"fill": "white"
},
"children": []
}
]
}
]
}
]
},
"name": "XorbitsInferenceText"
}

View File

@@ -0,0 +1,14 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import * as React from 'react'
import data from './XorbitsInferenceText.json'
import IconBase from '@/app/components/base/icons/IconBase'
import type { IconBaseProps, IconData } from '@/app/components/base/icons/IconBase'
const Icon = React.forwardRef<React.MutableRefObject<SVGElement>, Omit<IconBaseProps, 'data'>>((
props,
ref,
) => <IconBase {...props} ref={ref} data={data as IconData} />)
export default Icon

View File

@@ -21,5 +21,9 @@ export { default as OpenaiGreen } from './OpenaiGreen'
 export { default as OpenaiText } from './OpenaiText'
 export { default as OpenaiTransparent } from './OpenaiTransparent'
 export { default as OpenaiViolet } from './OpenaiViolet'
+export { default as OpenllmText } from './OpenllmText'
+export { default as Openllm } from './Openllm'
 export { default as ReplicateText } from './ReplicateText'
 export { default as Replicate } from './Replicate'
+export { default as XorbitsInferenceText } from './XorbitsInferenceText'
+export { default as XorbitsInference } from './XorbitsInference'
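
The four added lines keep the barrel alphabetized, so the new provider icons are imported the same way as the existing ones. A hypothetical call site (the barrel path is assumed, not shown in this diff):

```tsx
// Both the square mark and the wordmark come from the same barrel file,
// so provider pages don't need to know the per-icon module layout.
import { XorbitsInference, XorbitsInferenceText } from '@/app/components/base/icons/src/public/llm'

// e.g. a provider row showing the mark when compact and the full wordmark
// when there is room (purely illustrative markup).
const XinferenceRow = ({ compact }: { compact: boolean }) =>
  compact ? <XorbitsInference className='h-6 w-6' /> : <XorbitsInferenceText className='h-6' />
```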

View File

@@ -68,4 +68,4 @@
 ]
 },
 "name": "DotsHorizontal"
-}
\ No newline at end of file
+}

View File

@@ -1,5 +1,5 @@
 .spin-animation path {
-  animation: custom 1s linear infinite;
+  animation: custom 2s linear infinite;
 }
 
 @keyframes custom {
@@ -29,13 +29,13 @@
 }
 .spin-animation path:nth-child(2) {
   animation-delay: 0.25s;
 }
-.spin-animation path:nth-child(3) {
-  animation-delay: 0.5s;
-}
-.spin-animation path:nth-child(4) {
+.spin-animation path:nth-child(3) {
+  animation-delay: 1s;
+}
+.spin-animation path:nth-child(4) {
+  animation-delay: 2s;
+}

View File

@@ -1,9 +1,10 @@
 'use client'
 import { useBoolean } from 'ahooks'
-import React, { FC, useEffect, useState, useRef } from 'react'
+import React, { useEffect, useRef, useState } from 'react'
+import type { FC } from 'react'
 import { createRoot } from 'react-dom/client'
 
-export interface IPortalToFollowElementProps {
+type IPortalToFollowElementProps = {
   portalElem: React.ReactNode
   children: React.ReactNode
   controlShow?: number
@@ -14,44 +15,42 @@ const PortalToFollowElement: FC<IPortalToFollowElementProps> = ({
   portalElem,
   children,
   controlShow,
-  controlHide
+  controlHide,
 }) => {
   const [isShowContent, { setTrue: showContent, setFalse: hideContent, toggle: toggleContent }] = useBoolean(false)
   const [wrapElem, setWrapElem] = useState<HTMLDivElement | null>(null)
 
   useEffect(() => {
-    if (controlShow) {
+    if (controlShow)
       showContent()
-    }
   }, [controlShow])
 
   useEffect(() => {
-    if (controlHide) {
+    if (controlHide)
       hideContent()
-    }
   }, [controlHide])
 
   // todo use click outside hidden
-  const triggerElemRef = useRef<HTMLElement>(null)
+  const triggerElemRef = useRef<HTMLDivElement>(null)
 
   const calLoc = () => {
     const triggerElem = triggerElemRef.current
     if (!triggerElem) {
       return {
-        display: 'none'
+        display: 'none',
       }
     }
     const {
       left: triggerLeft,
       top: triggerTop,
-      height
-    } = triggerElem.getBoundingClientRect();
+      height,
+    } = triggerElem.getBoundingClientRect()
     return {
       position: 'fixed',
       left: triggerLeft,
       top: triggerTop + height,
-      zIndex: 999
+      zIndex: 999,
     }
   }
@@ -63,19 +62,20 @@ const PortalToFollowElement: FC<IPortalToFollowElementProps> = ({
       root.render(
         <div style={style as React.CSSProperties}>
           {portalElem}
-        </div>
+        </div>,
       )
       document.body.appendChild(holder)
       setWrapElem(holder)
       console.log(holder)
-    } else {
+    }
+    else {
       wrapElem?.remove?.()
       setWrapElem(null)
     }
   }, [isShowContent])
 
   return (
-    <div ref={triggerElemRef as any} onClick={toggleContent}>
+    <div ref={triggerElemRef as React.RefObject<HTMLDivElement>} onClick={toggleContent}>
       {children}
     </div>
   )
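
Judging from the props in this hunk, controlShow and controlHide are numeric change tokens rather than booleans: bumping the counter re-fires the matching useEffect, while clicking the wrapper div toggles visibility. A hedged usage sketch — the import path, default export, and markup are assumptions, not part of this diff:

```tsx
import React, { useState } from 'react'
// Import path and default export assumed; the component file itself is edited above.
import PortalToFollowElement from '@/app/components/base/portal-to-follow-element'

const ModelPicker = () => {
  // A change token: any increment fires the controlHide useEffect and closes
  // the portal, regardless of the number's value.
  const [hideCount, setHideCount] = useState(0)

  return (
    <PortalToFollowElement
      controlHide={hideCount}
      portalElem={
        <div onClick={() => setHideCount(c => c + 1)}>
          dropdown content (click to close)
        </div>
      }
    >
      {/* the wrapper div's own onClick toggles visibility, so the trigger needs no handler */}
      <button>open model list</button>
    </PortalToFollowElement>
  )
}

export default ModelPicker
```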

Some files were not shown because too many files have changed in this diff.