Ver código fonte

Merge branch 'main' into merge_test

陈婉冰 1 ano atrás
pai
commit
54b3603b3b
100 arquivos alterados com 2220 adições e 698 exclusões
  1. 2 2
      .devcontainer/Dockerfile
  2. 1 1
      .devcontainer/devcontainer.json
  3. 2 1
      .devcontainer/post_create_command.sh
  4. 2 2
      .github/actions/setup-poetry/action.yml
  5. 16 25
      .github/pull_request_template.md
  6. 15 6
      .github/workflows/api-tests.yml
  7. 11 4
      .github/workflows/build-push.yml
  8. 5 0
      .github/workflows/db-migration-test.yml
  9. 47 0
      .github/workflows/docker-build.yml
  10. 2 1
      .github/workflows/expose_service_ports.sh
  11. 43 3
      .github/workflows/style.yml
  12. 3 0
      .github/workflows/tool-test-sdks.yaml
  13. 1 0
      .github/workflows/translate-i18n-base-on-english.yml
  14. 17 2
      .github/workflows/vdb-tests.yml
  15. 3 0
      .github/workflows/web-tests.yml
  16. 1 0
      .gitignore
  17. 17 18
      CONTRIBUTING.md
  18. 1 1
      CONTRIBUTING_CN.md
  19. 1 1
      CONTRIBUTING_JA.md
  20. 2 2
      CONTRIBUTING_VI.md
  21. 79 0
      README.md
  22. 20 0
      README_AR.md
  23. 13 0
      README_CN.md
  24. 20 0
      README_ES.md
  25. 20 0
      README_FR.md
  26. 13 0
      README_JA.md
  27. 14 3
      README_KL.md
  28. 13 0
      README_KR.md
  29. 14 1
      README_PT.md
  30. 257 0
      README_SI.md
  31. 13 6
      README_TR.md
  32. 14 1
      README_VI.md
  33. 41 12
      api/.env.example
  34. 97 0
      api/.ruff.toml
  35. 14 13
      api/Dockerfile
  36. 14 13
      api/README.md
  37. 24 91
      api/app.py
  38. 75 152
      api/app_factory.py
  39. 24 27
      api/commands.py
  40. 66 8
      api/configs/app_config.py
  41. 0 5
      api/configs/deploy/__init__.py
  42. 69 21
      api/configs/feature/__init__.py
  43. 35 2
      api/configs/feature/hosted_service/__init__.py
  44. 54 42
      api/configs/middleware/__init__.py
  45. 15 0
      api/configs/middleware/cache/redis_config.py
  46. 3 2
      api/configs/middleware/storage/baidu_obs_storage_config.py
  47. 3 2
      api/configs/middleware/storage/huawei_obs_storage_config.py
  48. 9 0
      api/configs/middleware/storage/opendal_storage_config.py
  49. 3 2
      api/configs/middleware/storage/supabase_storage_config.py
  50. 3 2
      api/configs/middleware/storage/volcengine_tos_storage_config.py
  51. 11 2
      api/configs/middleware/vdb/analyticdb_config.py
  52. 3 2
      api/configs/middleware/vdb/couchbase_config.py
  53. 11 0
      api/configs/middleware/vdb/lindorm_config.py
  54. 6 0
      api/configs/middleware/vdb/milvus_config.py
  55. 3 2
      api/configs/middleware/vdb/myscale_config.py
  56. 3 2
      api/configs/middleware/vdb/vikingdb_config.py
  57. 1 1
      api/configs/packaging/__init__.py
  58. 17 0
      api/configs/remote_settings_sources/__init__.py
  59. 55 0
      api/configs/remote_settings_sources/apollo/__init__.py
  60. 304 0
      api/configs/remote_settings_sources/apollo/client.py
  61. 41 0
      api/configs/remote_settings_sources/apollo/python_3x.py
  62. 51 0
      api/configs/remote_settings_sources/apollo/utils.py
  63. 15 0
      api/configs/remote_settings_sources/base.py
  64. 5 0
      api/configs/remote_settings_sources/enums.py
  65. 3 3
      api/constants/__init__.py
  66. 2 0
      api/constants/languages.py
  67. 2 1
      api/constants/model_template.py
  68. 5 0
      api/controllers/common/errors.py
  69. 1 1
      api/controllers/common/fields.py
  70. 27 0
      api/controllers/common/helpers.py
  71. 95 5
      api/controllers/console/__init__.py
  72. 3 3
      api/controllers/console/admin.py
  73. 14 9
      api/controllers/console/apikey.py
  74. 1 1
      api/controllers/console/app/advanced_prompt_template.py
  75. 1 1
      api/controllers/console/app/agent.py
  76. 3 3
      api/controllers/console/app/annotation.py
  77. 31 65
      api/controllers/console/app/app.py
  78. 90 0
      api/controllers/console/app/app_import.py
  79. 10 6
      api/controllers/console/app/audio.py
  80. 4 5
      api/controllers/console/app/completion.py
  81. 10 9
      api/controllers/console/app/conversation.py
  82. 1 1
      api/controllers/console/app/conversation_variables.py
  83. 2 2
      api/controllers/console/app/generator.py
  84. 3 3
      api/controllers/console/app/message.py
  85. 11 6
      api/controllers/console/app/model_config.py
  86. 6 5
      api/controllers/console/app/ops_trace.py
  87. 6 6
      api/controllers/console/app/site.py
  88. 3 4
      api/controllers/console/app/statistic.py
  89. 46 32
      api/controllers/console/app/workflow.py
  90. 2 2
      api/controllers/console/app/workflow_app_log.py
  91. 2 2
      api/controllers/console/app/workflow_run.py
  92. 2 2
      api/controllers/console/app/workflow_statistic.py
  93. 2 3
      api/controllers/console/app/wraps.py
  94. 4 4
      api/controllers/console/auth/activate.py
  95. 2 2
      api/controllers/console/auth/data_source_bearer_auth.py
  96. 4 5
      api/controllers/console/auth/data_source_oauth.py
  97. 12 0
      api/controllers/console/auth/error.py
  98. 15 5
      api/controllers/console/auth/forgot_password.py
  99. 27 10
      api/controllers/console/auth/login.py
  100. 11 9
      api/controllers/console/auth/oauth.py

+ 2 - 2
.devcontainer/Dockerfile

@@ -1,5 +1,5 @@
-FROM mcr.microsoft.com/devcontainers/python:3.10
+FROM mcr.microsoft.com/devcontainers/python:3.12
 
 # [Optional] Uncomment this section to install additional OS packages.
 # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
-#     && apt-get -y install --no-install-recommends <your-package-list-here>
+#     && apt-get -y install --no-install-recommends <your-package-list-here>

+ 1 - 1
.devcontainer/devcontainer.json

@@ -1,7 +1,7 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the
 // README at: https://github.com/devcontainers/templates/tree/main/src/anaconda
 {
-	"name": "Python 3.10",
+	"name": "Python 3.12",
 	"build": { 
 		"context": "..",
 		"dockerfile": "Dockerfile"

+ 2 - 1
.devcontainer/post_create_command.sh

@@ -7,5 +7,6 @@ echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run
 echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
 echo 'alias start-web="cd /workspaces/dify/web && npm run dev"' >> ~/.bashrc
 echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify up -d"' >> ~/.bashrc
+echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify down"' >> ~/.bashrc
 
-source /home/vscode/.bashrc
+source /home/vscode/.bashrc

+ 2 - 2
.github/actions/setup-poetry/action.yml

@@ -4,11 +4,11 @@ inputs:
   python-version:
     description: Python version to use and the Poetry installed with
     required: true
-    default: '3.10'
+    default: '3.11'
   poetry-version:
     description: Poetry version to set up
     required: true
-    default: '1.8.4'
+    default: '2.0.1'
   poetry-lockfile:
     description: Path to the Poetry lockfile to restore cache from
     required: true

+ 16 - 25
.github/pull_request_template.md

@@ -1,34 +1,25 @@
-# Checklist:
+# Summary
 
-> [!IMPORTANT]  
-> Please review the checklist below before submitting your pull request.
-
-- [ ] Please open an issue before creating a PR or link to an existing issue
-- [ ] I have performed a self-review of my own code
-- [ ] I have commented my code, particularly in hard-to-understand areas
-- [ ] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
-
-# Description
-
-Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue. Close issue syntax: `Fixes #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.
+Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change.
 
-Fixes 
+> [!Tip]
+> Close issue syntax: `Fixes #<issue number>` or `Resolves #<issue number>`, see [documentation](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) for more details.
 
-## Type of Change
 
-- [ ] Bug fix (non-breaking change which fixes an issue)
-- [ ] New feature (non-breaking change which adds functionality)
-- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
-- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
-- [ ] Improvement, including but not limited to code refactoring, performance optimization, and UI/UX improvement
-- [ ] Dependency upgrade
-
-# Testing Instructions
+# Screenshots
 
-Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration
+| Before | After |
+|--------|-------|
+| ...    | ...   |
 
-- [ ] Test A
-- [ ] Test B
+# Checklist
 
+> [!IMPORTANT]  
+> Please review the checklist below before submitting your pull request.
 
+- [ ] This change requires a documentation update, included: [Dify Document](https://github.com/langgenius/dify-docs)
+- [x] I understand that this PR may be closed in case there was no previous discussion or issues. (This doesn't apply to typos!)
+- [x] I've added a test for each change that was introduced, and I tried as much as possible to make a single atomic change.
+- [x] I've updated the documentation accordingly.
+- [x] I ran `dev/reformat`(backend) and `cd web && npx lint-staged`(frontend) to appease the lint gods
 

+ 15 - 6
.github/workflows/api-tests.yml

@@ -20,13 +20,15 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - "3.10"
           - "3.11"
           - "3.12"
 
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Setup Poetry and Python ${{ matrix.python-version }}
         uses: ./.github/actions/setup-poetry
@@ -43,16 +45,23 @@ jobs:
         run: poetry install -C api --with dev
 
       - name: Check dependencies in pyproject.toml
-        run: poetry run -C api bash dev/pytest/pytest_artifacts.sh
+        run: poetry run -P api bash dev/pytest/pytest_artifacts.sh
 
       - name: Run Unit tests
-        run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh
+        run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh
 
       - name: Run ModelRuntime
-        run: poetry run -C api bash dev/pytest/pytest_model_runtime.sh
+        run: poetry run -P api bash dev/pytest/pytest_model_runtime.sh
+
+      - name: Run dify config tests
+        run: poetry run -P api python dev/pytest/pytest_config_tests.py
 
       - name: Run Tool
-        run: poetry run -C api bash dev/pytest/pytest_tools.sh
+        run: poetry run -P api bash dev/pytest/pytest_tools.sh
+
+      - name: Run mypy
+        run: |
+          poetry run -C api python -m mypy --install-types --non-interactive .
 
       - name: Set up dotenvs
         run: |
@@ -72,4 +81,4 @@ jobs:
             ssrf_proxy
 
       - name: Run Workflow
-        run: poetry run -C api bash dev/pytest/pytest_workflow.sh
+        run: poetry run -P api bash dev/pytest/pytest_workflow.sh

+ 11 - 4
.github/workflows/build-push.yml

@@ -79,10 +79,12 @@ jobs:
           cache-to: type=gha,mode=max,scope=${{ matrix.service_name }}
 
       - name: Export digest
+        env:
+          DIGEST: ${{ steps.build.outputs.digest }}
         run: |
           mkdir -p /tmp/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "/tmp/digests/${digest#sha256:}"
+          sanitized_digest=${DIGEST#sha256:}
+          touch "/tmp/digests/${sanitized_digest}"
 
       - name: Upload digest
         uses: actions/upload-artifact@v4
@@ -132,10 +134,15 @@ jobs:
 
       - name: Create manifest list and push
         working-directory: /tmp/digests
+        env:
+          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
         run: |
           docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env[matrix.image_name_env] }}@sha256:%s ' *)
+            $(printf "$IMAGE_NAME@sha256:%s " *)
 
       - name: Inspect image
+        env:
+          IMAGE_NAME: ${{ env[matrix.image_name_env] }}
+          IMAGE_VERSION: ${{ steps.meta.outputs.version }}
         run: |
-          docker buildx imagetools inspect ${{ env[matrix.image_name_env] }}:${{ steps.meta.outputs.version }}
+          docker buildx imagetools inspect "$IMAGE_NAME:$IMAGE_VERSION"

+ 5 - 0
.github/workflows/db-migration-test.yml

@@ -19,6 +19,9 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Setup Poetry and Python
         uses: ./.github/actions/setup-poetry
@@ -48,6 +51,8 @@ jobs:
           cp .env.example .env
 
       - name: Run DB Migration
+        env:
+          DEBUG: true
         run: |
           cd api
           poetry run python -m flask upgrade-db

+ 47 - 0
.github/workflows/docker-build.yml

@@ -0,0 +1,47 @@
+name: Build docker image
+
+on:
+  pull_request:
+    branches:
+      - "main"
+    paths:
+      - api/Dockerfile
+      - web/Dockerfile
+
+concurrency:
+  group: docker-build-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  build-docker:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - service_name: "api-amd64"
+            platform: linux/amd64
+            context: "api"
+          - service_name: "api-arm64"
+            platform: linux/arm64
+            context: "api"
+          - service_name: "web-amd64"
+            platform: linux/amd64
+            context: "web"
+          - service_name: "web-arm64"
+            platform: linux/arm64
+            context: "web"
+    steps:
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build Docker Image
+        uses: docker/build-push-action@v6
+        with:
+          push: false
+          context: "{{defaultContext}}:${{ matrix.context }}"
+          platforms: ${{ matrix.platform }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max

+ 2 - 1
.github/workflows/expose_service_ports.sh

@@ -9,5 +9,6 @@ yq eval '.services["pgvecto-rs"].ports += ["5431:5432"]' -i docker/docker-compos
 yq eval '.services["elasticsearch"].ports += ["9200:9200"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["8091-8096:8091-8096"]' -i docker/docker-compose.yaml
 yq eval '.services.couchbase-server.ports += ["11210:11210"]' -i docker/docker-compose.yaml
+yq eval '.services.tidb.ports += ["4000:4000"]' -i docker/tidb/docker-compose.yaml
 
-echo "Ports exposed for sandbox, weaviate, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"
+echo "Ports exposed for sandbox, weaviate, tidb, qdrant, chroma, milvus, pgvector, pgvecto-rs, elasticsearch, couchbase"

+ 43 - 3
.github/workflows/style.yml

@@ -17,6 +17,9 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
@@ -37,12 +40,13 @@ jobs:
       - name: Ruff check
         if: steps.changed-files.outputs.any_changed == 'true'
         run: |
-          poetry run -C api ruff check ./api
-          poetry run -C api ruff format --check ./api
+          poetry run -C api ruff --version
+          poetry run -C api ruff check ./
+          poetry run -C api ruff format --check ./
 
       - name: Dotenv check
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: poetry run -C api dotenv-linter ./api/.env.example ./web/.env.example
+        run: poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example
 
       - name: Lint hints
         if: failure()
@@ -58,6 +62,9 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Check changed files
         id: changed-files
@@ -81,6 +88,36 @@ jobs:
         if: steps.changed-files.outputs.any_changed == 'true'
         run: yarn run lint
 
+  docker-compose-template:
+    name: Docker Compose Template
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
+
+      - name: Check changed files
+        id: changed-files
+        uses: tj-actions/changed-files@v45
+        with:
+          files: |
+            docker/generate_docker_compose
+            docker/.env.example
+            docker/docker-compose-template.yaml
+            docker/docker-compose.yaml
+
+      - name: Generate Docker Compose
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: |
+          cd docker
+          ./generate_docker_compose
+
+      - name: Check for changes
+        if: steps.changed-files.outputs.any_changed == 'true'
+        run: git diff --exit-code
 
   superlinter:
     name: SuperLinter
@@ -89,6 +126,9 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Check changed files
         id: changed-files

+ 3 - 0
.github/workflows/tool-test-sdks.yaml

@@ -26,6 +26,9 @@ jobs:
 
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Use Node.js ${{ matrix.node-version }}
         uses: actions/setup-node@v4

+ 1 - 0
.github/workflows/translate-i18n-base-on-english.yml

@@ -16,6 +16,7 @@ jobs:
       - uses: actions/checkout@v4
         with:
           fetch-depth: 2 # last 2 commits
+          persist-credentials: false
 
       - name: Check for file changes in i18n/en-US
         id: check_files

+ 17 - 2
.github/workflows/vdb-tests.yml

@@ -8,6 +8,8 @@ on:
       - api/core/rag/datasource/**
       - docker/**
       - .github/workflows/vdb-tests.yml
+      - api/poetry.lock
+      - api/pyproject.toml
 
 concurrency:
   group: vdb-tests-${{ github.head_ref || github.run_id }}
@@ -20,13 +22,15 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - "3.10"
           - "3.11"
           - "3.12"
 
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Setup Poetry and Python ${{ matrix.python-version }}
         uses: ./.github/actions/setup-poetry
@@ -50,6 +54,14 @@ jobs:
       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
 
+      - name: Set up Vector Store (TiDB)
+        uses: hoverkraft-tech/compose-action@v2.0.2
+        with:
+          compose-file: docker/tidb/docker-compose.yaml
+          services: |
+            tidb
+            tiflash
+
       - name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase)
         uses: hoverkraft-tech/compose-action@v2.0.2
         with:
@@ -67,5 +79,8 @@ jobs:
             chroma
             elasticsearch
 
+      - name: Check TiDB Ready
+        run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
+
       - name: Test Vector Stores
-        run: poetry run -C api bash dev/pytest/pytest_vdb.sh
+        run: poetry run -P api bash dev/pytest/pytest_vdb.sh

+ 3 - 0
.github/workflows/web-tests.yml

@@ -22,6 +22,9 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          persist-credentials: false
 
       - name: Check changed files
         id: changed-files

+ 1 - 0
.gitignore

@@ -163,6 +163,7 @@ docker/volumes/db/data/*
 docker/volumes/redis/data/*
 docker/volumes/weaviate/*
 docker/volumes/qdrant/*
+docker/tidb/volumes/*
 docker/volumes/etcd/*
 docker/volumes/minio/*
 docker/volumes/milvus/*

+ 17 - 18
CONTRIBUTING.md

@@ -1,6 +1,8 @@
+# CONTRIBUTING
+
 So you're looking to contribute to Dify - that's awesome, we can't wait to see what you do. As a startup with limited headcount and funding, we have grand ambitions to design the most intuitive workflow for building and managing LLM applications. Any help from the community counts, truly.
 
-We need to be nimble and ship fast given where we are, but we also want to make sure that contributors like you get as smooth an experience at contributing as possible. We've assembled this contribution guide for that purpose, aiming at getting you familiarized with the codebase & how we work with contributors, so you could quickly jump to the fun part. 
+We need to be nimble and ship fast given where we are, but we also want to make sure that contributors like you get as smooth an experience at contributing as possible. We've assembled this contribution guide for that purpose, aiming at getting you familiarized with the codebase & how we work with contributors, so you could quickly jump to the fun part.
 
 This guide, like Dify itself, is a constant work in progress. We highly appreciate your understanding if at times it lags behind the actual project, and welcome any feedback for us to improve.
 
@@ -10,14 +12,12 @@ In terms of licensing, please take a minute to read our short [License and Contr
 
 [Find](https://github.com/langgenius/dify/issues?q=is:issue+is:open) an existing issue, or [open](https://github.com/langgenius/dify/issues/new/choose) a new one. We categorize issues into 2 types:
 
-### Feature requests:
+### Feature requests
 
 * If you're opening a new feature request, we'd like you to explain what the proposed feature achieves, and include as much context as possible. [@perzeusss](https://github.com/perzeuss) has made a solid [Feature Request Copilot](https://udify.app/chat/MK2kVSnw1gakVwMX) that helps you draft out your needs. Feel free to give it a try.
 
 * If you want to pick one up from the existing issues, simply drop a comment below it saying so.
 
-  
-
   A team member working in the related direction will be looped in. If all looks good, they will give the go-ahead for you to start coding. We ask that you hold off working on the feature until then, so none of your work goes to waste should we propose changes.
 
   Depending on whichever area the proposed feature falls under, you might talk to different team members. Here's rundown of the areas each our team members are working on at the moment:
@@ -40,7 +40,7 @@ In terms of licensing, please take a minute to read our short [License and Contr
   | Non-core features and minor enhancements                     | Low Priority    |
   | Valuable but not immediate                                   | Future-Feature  |
 
-### Anything else (e.g. bug report, performance optimization, typo correction):
+### Anything else (e.g. bug report, performance optimization, typo correction)
 
 * Start coding right away.
 
@@ -52,7 +52,6 @@ In terms of licensing, please take a minute to read our short [License and Contr
   | Non-critical bugs, performance boosts                        | Medium Priority |
   | Minor fixes (typos, confusing but working UI)                | Low Priority    |
 
-
 ## Installing
 
 Here are the steps to set up Dify for development:
@@ -63,7 +62,7 @@ Here are the steps to set up Dify for development:
 
  Clone the forked repository from your terminal:
 
-```
+```shell
 git clone git@github.com:<github_username>/dify.git
 ```
 
@@ -71,11 +70,11 @@ git clone git@github.com:<github_username>/dify.git
 
 Dify requires the following dependencies to build, make sure they're installed on your system:
 
-- [Docker](https://www.docker.com/)
-- [Docker Compose](https://docs.docker.com/compose/install/)
-- [Node.js v18.x (LTS)](http://nodejs.org)
-- [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
-- [Python](https://www.python.org/) version 3.10.x
+* [Docker](https://www.docker.com/)
+* [Docker Compose](https://docs.docker.com/compose/install/)
+* [Node.js v18.x (LTS)](http://nodejs.org)
+* [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
+* [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. Installations
 
@@ -85,7 +84,7 @@ Check the [installation FAQ](https://docs.dify.ai/learn-more/faq/install-faq) fo
 
 ### 5. Visit dify in your browser
 
-To validate your set up, head over to [http://localhost:3000](http://localhost:3000) (the default, or your self-configured URL and port) in your browser. You should now see Dify up and running. 
+To validate your set up, head over to [http://localhost:3000](http://localhost:3000) (the default, or your self-configured URL and port) in your browser. You should now see Dify up and running.
 
 ## Developing
 
@@ -97,9 +96,9 @@ To help you quickly navigate where your contribution fits, a brief, annotated ou
 
 ### Backend
 
-Dify’s backend is written in Python using [Flask](https://flask.palletsprojects.com/en/3.0.x/). It uses [SQLAlchemy](https://www.sqlalchemy.org/) for ORM and [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) for task queueing. Authorization logic goes via Flask-login. 
+Dify’s backend is written in Python using [Flask](https://flask.palletsprojects.com/en/3.0.x/). It uses [SQLAlchemy](https://www.sqlalchemy.org/) for ORM and [Celery](https://docs.celeryq.dev/en/stable/getting-started/introduction.html) for task queueing. Authorization logic goes via Flask-login.
 
-```
+```text
 [api/]
 ├── constants             // Constant settings used throughout code base.
 ├── controllers           // API route definitions and request handling logic.           
@@ -121,7 +120,7 @@ Dify’s backend is written in Python using [Flask](https://flask.palletsproject
 
 The website is bootstrapped on [Next.js](https://nextjs.org/) boilerplate in Typescript and uses [Tailwind CSS](https://tailwindcss.com/) for styling. [React-i18next](https://react.i18next.com/) is used for internationalization.
 
-```
+```text
 [web/]
 ├── app                   // layouts, pages, and components
 │   ├── (commonLayout)    // common layout used throughout the app
@@ -149,10 +148,10 @@ The website is bootstrapped on [Next.js](https://nextjs.org/) boilerplate in Typ
 
 ## Submitting your PR
 
-At last, time to open a pull request (PR) to our repo. For major features, we first merge them into the `deploy/dev` branch for testing, before they go into the `main` branch. If you run into issues like merge conflicts or don't know how to open a pull request, check out [GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests). 
+At last, time to open a pull request (PR) to our repo. For major features, we first merge them into the `deploy/dev` branch for testing, before they go into the `main` branch. If you run into issues like merge conflicts or don't know how to open a pull request, check out [GitHub's pull request tutorial](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests).
 
 And that's it! Once your PR is merged, you will be featured as a contributor in our [README](https://github.com/langgenius/dify/blob/main/README.md).
 
 ## Getting Help
 
-If you ever get stuck or got a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat. 
+If you ever get stuck or got a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat.

+ 1 - 1
CONTRIBUTING_CN.md

@@ -71,7 +71,7 @@ Dify 依赖以下工具和库:
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
 - [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
-- [Python](https://www.python.org/) version 3.10.x
+- [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. 安装
 

+ 1 - 1
CONTRIBUTING_JA.md

@@ -74,7 +74,7 @@ Dify を構築するには次の依存関係が必要です。それらがシス
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
 - [npm](https://www.npmjs.com/) version 8.x.x or [Yarn](https://yarnpkg.com/)
-- [Python](https://www.python.org/) version 3.10.x
+- [Python](https://www.python.org/) version 3.11.x or 3.12.x
 
 ### 4. インストール
 

+ 2 - 2
CONTRIBUTING_VI.md

@@ -73,7 +73,7 @@ Dify yêu cầu các phụ thuộc sau để build, hãy đảm bảo chúng đ
 - [Docker Compose](https://docs.docker.com/compose/install/)
 - [Node.js v18.x (LTS)](http://nodejs.org)
 - [npm](https://www.npmjs.com/) phiên bản 8.x.x hoặc [Yarn](https://yarnpkg.com/)
-- [Python](https://www.python.org/) phiên bản 3.10.x
+- [Python](https://www.python.org/) phiên bản 3.11.x hoặc 3.12.x
 
 ### 4. Cài đặt
 
@@ -153,4 +153,4 @@ Và thế là xong! Khi PR của bạn được merge, bạn sẽ được giớ
 
 ## Nhận trợ giúp
 
-Nếu bạn gặp khó khăn hoặc có câu hỏi cấp bách trong quá trình đóng góp, hãy đặt câu hỏi của bạn trong vấn đề GitHub liên quan, hoặc tham gia [Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi để trò chuyện nhanh chóng.
+Nếu bạn gặp khó khăn hoặc có câu hỏi cấp bách trong quá trình đóng góp, hãy đặt câu hỏi của bạn trong vấn đề GitHub liên quan, hoặc tham gia [Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi để trò chuyện nhanh chóng.

+ 79 - 0
README.md

@@ -19,9 +19,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="join Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -102,6 +108,72 @@ Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-host
 **7. Backend-as-a-Service**: 
   All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
 
+## Feature Comparison
+<table style="width: 100%;">
+  <tr>
+    <th align="center">Feature</th>
+    <th align="center">Dify.AI</th>
+    <th align="center">LangChain</th>
+    <th align="center">Flowise</th>
+    <th align="center">OpenAI Assistants API</th>
+  </tr>
+  <tr>
+    <td align="center">Programming Approach</td>
+    <td align="center">API + App-oriented</td>
+    <td align="center">Python Code</td>
+    <td align="center">App-oriented</td>
+    <td align="center">API-oriented</td>
+  </tr>
+  <tr>
+    <td align="center">Supported LLMs</td>
+    <td align="center">Rich Variety</td>
+    <td align="center">Rich Variety</td>
+    <td align="center">Rich Variety</td>
+    <td align="center">OpenAI-only</td>
+  </tr>
+  <tr>
+    <td align="center">RAG Engine</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+  </tr>
+  <tr>
+    <td align="center">Agent</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">✅</td>
+  </tr>
+  <tr>
+    <td align="center">Workflow</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Observability</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Enterprise Feature (SSO/Access control)</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Local Deployment</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+  </tr>
+</table>
 
 ## Using Dify
 
@@ -144,6 +216,13 @@ Deploy Dify to Cloud Platform with a single click using [terraform](https://www.
 ##### Google Cloud
 - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### Using AWS CDK for Deployment
+
+Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Contributing
 
 For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 

+ 20 - 0
README_AR.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="join Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -187,6 +193,13 @@ docker compose up -d
 ##### Google Cloud
 - [Google Cloud Terraform بواسطة @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### استخدام AWS CDK للنشر
+
+انشر Dify على AWS باستخدام [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK بواسطة @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## المساهمة
 
 لأولئك الذين يرغبون في المساهمة، انظر إلى [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) لدينا. 
@@ -219,3 +232,10 @@ docker compose up -d
 ## الرخصة
 
 هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية.
+## الكشف عن الأمان
+
+لحماية خصوصيتك، يرجى تجنب نشر مشكلات الأمان على GitHub. بدلاً من ذلك، أرسل أسئلتك إلى security@dify.ai وسنقدم لك إجابة أكثر تفصيلاً.
+
+## الرخصة
+
+هذا المستودع متاح تحت [رخصة البرنامج الحر Dify](LICENSE)، والتي تعتبر بشكل أساسي Apache 2.0 مع بعض القيود الإضافية.

+ 13 - 0
README_CN.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="join Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -210,6 +216,13 @@ docker compose up -d
 ##### Google Cloud
 - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### 使用 AWS CDK 部署
+
+使用 [CDK](https://aws.amazon.com/cdk/) 将 Dify 部署到 AWS
+
+##### AWS 
+- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Star History
 
 [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)

+ 20 - 0
README_ES.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat en Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="join Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="seguir en X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="seguir en LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Descargas de Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -212,6 +218,13 @@ Despliega Dify en una plataforma en la nube con un solo clic utilizando [terrafo
 ##### Google Cloud
 - [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### Usando AWS CDK para el Despliegue
+
+Despliegue Dify en AWS usando [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Contribuir
 
 Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 
@@ -245,3 +258,10 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En
 ## Licencia
 
 Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.
+## Divulgación de Seguridad
+
+Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada.
+
+## Licencia
+
+Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales.

+ 20 - 0
README_FR.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat sur Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="join Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="suivre sur X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="suivre sur LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Tirages Docker" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -210,6 +216,13 @@ Déployez Dify sur une plateforme cloud en un clic en utilisant [terraform](http
 ##### Google Cloud
 - [Google Cloud Terraform par @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### Utilisation d'AWS CDK pour le déploiement
+
+Déployez Dify sur AWS en utilisant [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK par @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Contribuer
 
 Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 
@@ -243,3 +256,10 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de
 ## Licence
 
 Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires.
+## Divulgation de sécurité
+
+Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée.
+
+## Licence
+
+Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires.

+ 13 - 0
README_JA.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="Discordでチャット"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)でフォロー"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="LinkedInでフォロー"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -209,6 +215,13 @@ docker compose up -d
 ##### Google Cloud
 - [@sotazumによるGoogle Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### AWS CDK を使用したデプロイ
+
+[CDK](https://aws.amazon.com/cdk/) を使用して、DifyをAWSにデプロイします
+
+##### AWS 
+- [@KevinZhaoによるAWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## 貢献
 
 コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)を参照してください。

+ 14 - 3
README_KL.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Follow Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -81,9 +87,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
 
 ## Feature Comparison
 <table style="width: 100%;">
-  <tr
-
->
+  <tr>
     <th align="center">Feature</th>
     <th align="center">Dify.AI</th>
     <th align="center">LangChain</th>
@@ -210,6 +214,13 @@ wa'logh nIqHom neH ghun deployment toy'wI' [terraform](https://www.terraform.io/
 ##### Google Cloud
 - [Google Cloud Terraform qachlot @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### AWS CDK atorlugh pilersitsineq
+
+wa'logh nIqHom neH ghun deployment toy'wI' [CDK](https://aws.amazon.com/cdk/) lo'laH.
+
+##### AWS 
+- [AWS CDK qachlot @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Contributing
 
 For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 

+ 13 - 0
README_KR.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Follow Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -202,6 +208,13 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
 ##### Google Cloud
 - [sotazum의 Google Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### AWS CDK를 사용한 배포
+
+[CDK](https://aws.amazon.com/cdk/)를 사용하여 AWS에 Dify 배포
+
+##### AWS 
+- [KevinZhao의 AWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## 기여
 
 코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.

+ 14 - 1
README_PT.md

@@ -19,9 +19,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat on Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Follow Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -208,6 +214,13 @@ Implante o Dify na Plataforma Cloud com um único clique usando [terraform](http
 ##### Google Cloud
 - [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### Usando AWS CDK para Implantação
+
+Implante o Dify na AWS usando [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Contribuindo
 
 Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 
@@ -238,4 +251,4 @@ Para proteger sua privacidade, evite postar problemas de segurança no GitHub. E
 
 ## Licença
 
-Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais.
+Este repositório está disponível sob a [Licença de Código Aberto Dify](LICENSE), que é essencialmente Apache 2.0 com algumas restrições adicionais.

+ 257 - 0
README_SI.md

@@ -0,0 +1,257 @@
+![cover-v5-optimized](https://github.com/langgenius/dify/assets/13230914/f9e19af5-61ba-4119-b926-d10c4c06ebab)
+
+<p align="center">
+  📌 <a href="https://dify.ai/blog/introducing-dify-workflow-file-upload-a-demo-on-ai-podcast">Predstavljamo nalaganje datotek Dify Workflow: znova ustvarite Google NotebookLM Podcast</a>
+</p>
+
+<p align="center">
+  <a href="https://cloud.dify.ai">Dify Cloud</a> ·
+  <a href="https://docs.dify.ai/getting-started/install-self-hosted">Samostojno gostovanje</a> ·
+  <a href="https://docs.dify.ai">Dokumentacija</a> ·
+  <a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Povpraševanje za podjetja</a>
+</p>
+
+<p align="center">
+    <a href="https://dify.ai" target="_blank">
+        <img alt="Static Badge" src="https://img.shields.io/badge/Product-F04438"></a>
+    <a href="https://dify.ai/pricing" target="_blank">
+        <img alt="Static Badge" src="https://img.shields.io/badge/free-pricing?logo=free&color=%20%23155EEF&label=pricing&labelColor=%20%23528bff"></a>
+    <a href="https://discord.gg/FngNHpbcY7" target="_blank">
+        <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
+            alt="chat on Discord"></a>
+    <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
+        <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
+            alt="follow on X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="follow on LinkedIn"></a>
+    <a href="https://hub.docker.com/u/langgenius" target="_blank">
+        <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
+    <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
+        <img alt="Commits last month" src="https://img.shields.io/github/commit-activity/m/langgenius/dify?labelColor=%20%2332b583&color=%20%2312b76a"></a>
+    <a href="https://github.com/langgenius/dify/" target="_blank">
+        <img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
+    <a href="https://github.com/langgenius/dify/discussions/" target="_blank">
+        <img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
+</p>
+
+<p align="center">
+  <a href="./README.md"><img alt="README in English" src="https://img.shields.io/badge/English-d9d9d9"></a>
+  <a href="./README_CN.md"><img alt="简体中文版自述文件" src="https://img.shields.io/badge/简体中文-d9d9d9"></a>
+  <a href="./README_JA.md"><img alt="日本語のREADME" src="https://img.shields.io/badge/日本語-d9d9d9"></a>
+  <a href="./README_ES.md"><img alt="README en Español" src="https://img.shields.io/badge/Español-d9d9d9"></a>
+  <a href="./README_FR.md"><img alt="README en Français" src="https://img.shields.io/badge/Français-d9d9d9"></a>
+  <a href="./README_KL.md"><img alt="README tlhIngan Hol" src="https://img.shields.io/badge/Klingon-d9d9d9"></a>
+  <a href="./README_KR.md"><img alt="README in Korean" src="https://img.shields.io/badge/한국어-d9d9d9"></a>
+  <a href="./README_AR.md"><img alt="README بالعربية" src="https://img.shields.io/badge/العربية-d9d9d9"></a>
+  <a href="./README_TR.md"><img alt="Türkçe README" src="https://img.shields.io/badge/Türkçe-d9d9d9"></a>
+  <a href="./README_VI.md"><img alt="README Tiếng Việt" src="https://img.shields.io/badge/Ti%E1%BA%BFng%20Vi%E1%BB%87t-d9d9d9"></a>
+  <a href="./README_SI.md"><img alt="README Slovenščina" src="https://img.shields.io/badge/Sloven%C5%A1%C4%8Dina-d9d9d9"></a>
+</p>
+
+
+Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. 
+
+## Hitri začetek
+> Preden namestite Dify, se prepričajte, da vaša naprava izpolnjuje naslednje minimalne sistemske zahteve:
+> 
+>- CPU >= 2 Core
+>- RAM >= 4 GiB
+
+</br>
+
+Najlažji način za zagon strežnika Dify je prek docker compose . Preden zaženete Dify z naslednjimi ukazi, se prepričajte, da sta Docker in Docker Compose nameščena na vašem računalniku:
+
+```bash
+cd dify
+cd docker
+cp .env.example .env
+docker compose up -d
+```
+
+Po zagonu lahko dostopate do nadzorne plošče Dify v brskalniku na [http://localhost/install](http://localhost/install) in začnete postopek inicializacije.
+
+#### Iskanje pomoči
+Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) če naletite na težave pri nastavitvi Dify. Če imate še vedno težave, se obrnite na [skupnost ali nas](#community--contact).
+
+> Če želite prispevati k Difyju ali narediti dodaten razvoj, glejte naš vodnik za [uvajanje iz izvorne kode](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
+
+## Ključne značilnosti
+**1. Potek dela**: 
+  Zgradite in preizkusite zmogljive poteke dela AI na vizualnem platnu, pri čemer izkoristite vse naslednje funkcije in več.
+
+
+  https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
+
+
+
+**2. Celovita podpora za modele**: 
+  Brezhibna integracija s stotinami lastniških/odprtokodnih LLM-jev ducatov ponudnikov sklepanja in samostojnih rešitev, ki pokrivajo GPT, Mistral, Llama3 in vse modele, združljive z API-jem OpenAI. Celoten seznam podprtih ponudnikov modelov najdete [tukaj](https://docs.dify.ai/getting-started/readme/model-providers).
+
+![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
+
+
+**3. Prompt IDE**: 
+  intuitivni vmesnik za ustvarjanje pozivov, primerjavo zmogljivosti modela in dodajanje dodatnih funkcij, kot je pretvorba besedila v govor, aplikaciji, ki temelji na klepetu. 
+
+**4. RAG Pipeline**: 
+  Obsežne zmogljivosti RAG, ki pokrivajo vse od vnosa dokumenta do priklica, s podporo za ekstrakcijo besedila iz datotek PDF, PPT in drugih običajnih formatov dokumentov.
+
+**5. Agent capabilities**: 
+  definirate lahko agente, ki temeljijo na klicanju funkcij LLM ali ReAct, in dodate vnaprej izdelana orodja ali orodja po meri za agenta. Dify ponuja več kot 50 vgrajenih orodij za agente AI, kot so Google Search, DALL·E, Stable Diffusion in WolframAlpha.
+
+**6. LLMOps**: 
+  Spremljajte in analizirajte dnevnike aplikacij in učinkovitost skozi čas. Pozive, nabore podatkov in modele lahko nenehno izboljšujete na podlagi proizvodnih podatkov in opomb.
+
+**7. Backend-as-a-Service**: 
+  Vse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko.
+
+## Primerjava Funkcij
+
+<table style="width: 100%;">
+  <tr>
+    <th align="center">Funkcija</th>
+    <th align="center">Dify.AI</th>
+    <th align="center">LangChain</th>
+    <th align="center">Flowise</th>
+    <th align="center">OpenAI Assistants API</th>
+  </tr>
+  <tr>
+    <td align="center">Programski pristop</td>
+    <td align="center">API + usmerjeno v aplikacije</td>
+    <td align="center">Python koda</td>
+    <td align="center">Usmerjeno v aplikacije</td>
+    <td align="center">Usmerjeno v API</td>
+  </tr>
+  <tr>
+    <td align="center">Podprti LLM-ji</td>
+    <td align="center">Bogata izbira</td>
+    <td align="center">Bogata izbira</td>
+    <td align="center">Bogata izbira</td>
+    <td align="center">Samo OpenAI</td>
+  </tr>
+  <tr>
+    <td align="center">RAG pogon</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+  </tr>
+  <tr>
+    <td align="center">Agent</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">✅</td>
+  </tr>
+  <tr>
+    <td align="center">Potek dela</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Spremljanje</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Funkcija za podjetja (SSO/nadzor dostopa)</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+    <td align="center">❌</td>
+  </tr>
+  <tr>
+    <td align="center">Lokalna namestitev</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">✅</td>
+    <td align="center">❌</td>
+  </tr>
+</table>
+
+## Uporaba Dify
+
+- **Cloud </br>**
+Gostimo storitev Dify Cloud za vsakogar, ki jo lahko preizkusite brez nastavitev. Zagotavlja vse zmožnosti različice za samostojno namestitev in vključuje 200 brezplačnih klicev GPT-4 v načrtu peskovnika.
+
+- **Self-hosting Dify Community Edition</br>**
+Hitro zaženite Dify v svojem okolju s tem [začetnim vodnikom](#quick-start) . Za dodatne reference in podrobnejša navodila uporabite našo [dokumentacijo](https://docs.dify.ai) .
+
+
+- **Dify za podjetja/organizacije</br>**
+Ponujamo dodatne funkcije, osredotočene na podjetja. Zabeležite svoja vprašanja prek tega klepetalnega robota ali nam pošljite e-pošto, da se pogovorimo o potrebah podjetja. </br>
+  > Za novoustanovljena podjetja in mala podjetja, ki uporabljajo AWS, si oglejte Dify Premium na AWS Marketplace in ga z enim klikom uvedite v svoj AWS VPC. To je cenovno ugodna ponudba AMI z možnostjo ustvarjanja aplikacij z logotipom in blagovno znamko po meri.
+
+
+## Staying ahead
+
+Star Dify on GitHub and be instantly notified of new releases.
+
+![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
+
+
+## Napredne nastavitve
+
+Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi datoteko docker-compose.yaml, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj.
+
+Če želite konfigurirati visoko razpoložljivo nastavitev, so na voljo Helm Charts in datoteke YAML, ki jih prispeva skupnost, ki omogočajo uvedbo Difyja v Kubernetes.
+
+- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
+- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
+- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
+
+#### Uporaba Terraform za uvajanje
+
+namestite Dify v Cloud Platform z enim klikom z uporabo [terraform](https://www.terraform.io/)
+
+##### Azure Global
+- [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
+
+##### Google Cloud
+- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
+
+#### Uporaba AWS CDK za uvajanje
+
+Uvedite Dify v AWS z uporabo [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
+## Prispevam
+
+Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. 
+
+
+
+> Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v kanalu global-users našega strežnika skupnosti Discord.
+
+## Skupnost in stik
+
+* [Github Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj.
+* [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
+* [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
+* [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
+
+**Contributors**
+
+<a href="https://github.com/langgenius/dify/graphs/contributors">
+  <img src="https://contrib.rocks/image?repo=langgenius/dify" />
+</a>
+
+## Star history
+
+[![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
+
+
+## Varnostno razkritje
+
+Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj na GitHub. Namesto tega pošljite vprašanja na security@dify.ai in zagotovili vam bomo podrobnejši odgovor.
+
+## Licenca
+
+To skladišče je na voljo pod [odprtokodno licenco Dify](LICENSE) , ki je v bistvu Apache 2.0 z nekaj dodatnimi omejitvami.

+ 13 - 6
README_TR.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="Discord'da sohbet et"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Follow Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="X(Twitter)'da takip et"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="LinkedIn'da takip et"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Çekmeleri" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -59,8 +65,6 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
 
 
-Özür dilerim, haklısınız. Daha anlamlı ve akıcı bir çeviri yapmaya çalışayım. İşte güncellenmiş çeviri:
-
 **3. Prompt IDE**: 
   Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz.
 
@@ -147,8 +151,6 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi
 ## Dify'ı Kullanma
 
 - **Cloud </br>**
-İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
-- 
 Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir.
 
 - **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma</br>**
@@ -174,8 +176,6 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun.
 >- RAM >= 4GB
 
 </br>
-İşte verdiğiniz metnin Türkçe çevirisi, kod bloğu içinde:
-
 Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun:
 
 ```bash
@@ -208,6 +208,13 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter
 ##### Google Cloud
 - [Google Cloud Terraform tarafından @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### AWS CDK ile Dağıtım
+
+[CDK](https://aws.amazon.com/cdk/) kullanarak Dify'ı AWS'ye dağıtın
+
+##### AWS 
+- [AWS CDK tarafından @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Katkıda Bulunma
 
 Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz.

+ 14 - 1
README_VI.md

@@ -15,9 +15,15 @@
     <a href="https://discord.gg/FngNHpbcY7" target="_blank">
         <img src="https://img.shields.io/discord/1082486657678311454?logo=discord&labelColor=%20%235462eb&logoColor=%20%23f5f5f5&color=%20%235462eb"
             alt="chat trên Discord"></a>
+    <a href="https://reddit.com/r/difyai" target="_blank">  
+        <img src="https://img.shields.io/reddit/subreddit-subscribers/difyai?style=plastic&logo=reddit&label=r%2Fdifyai&labelColor=white"
+            alt="Follow Reddit"></a>
     <a href="https://twitter.com/intent/follow?screen_name=dify_ai" target="_blank">
         <img src="https://img.shields.io/twitter/follow/dify_ai?logo=X&color=%20%23f5f5f5"
             alt="theo dõi trên X(Twitter)"></a>
+    <a href="https://www.linkedin.com/company/langgenius/" target="_blank">
+        <img src="https://custom-icon-badges.demolab.com/badge/LinkedIn-0A66C2?logo=linkedin-white&logoColor=fff"
+            alt="theo dõi trên LinkedIn"></a>
     <a href="https://hub.docker.com/u/langgenius" target="_blank">
         <img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/langgenius/dify-web?labelColor=%20%23FDB062&color=%20%23f79009"></a>
     <a href="https://github.com/langgenius/dify/graphs/commit-activity" target="_blank">
@@ -204,6 +210,13 @@ Triển khai Dify lên nền tảng đám mây với một cú nhấp chuột b
 ##### Google Cloud
 - [Google Cloud Terraform bởi @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
+#### Sử dụng AWS CDK để Triển khai
+
+Triển khai Dify trên AWS bằng [CDK](https://aws.amazon.com/cdk/)
+
+##### AWS 
+- [AWS CDK bởi @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
+
 ## Đóng góp
 
 Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. 
@@ -235,4 +248,4 @@ Triển khai Dify lên nền tảng đám mây với một cú nhấp chuột b
 
 ## Giấy phép
 
-Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung.
+Kho lưu trữ này có sẵn theo [Giấy phép Mã nguồn Mở Dify](LICENSE), về cơ bản là Apache 2.0 với một vài hạn chế bổ sung.

+ 41 - 12
api/.env.example

@@ -23,6 +23,9 @@ FILES_ACCESS_TIMEOUT=300
 # Access token expiration time in minutes
 ACCESS_TOKEN_EXPIRE_MINUTES=60
 
+# Refresh token expiration time in days
+REFRESH_TOKEN_EXPIRE_DAYS=30
+
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:16379/1
 
@@ -42,6 +45,11 @@ REDIS_SENTINEL_USERNAME=
 REDIS_SENTINEL_PASSWORD=
 REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
 
+# redis Cluster configuration.
+REDIS_USE_CLUSTERS=false
+REDIS_CLUSTERS=
+REDIS_CLUSTERS_PASSWORD=
+
 # PostgreSQL database configuration
 DB_USERNAME=postgres
 DB_PASSWORD=difyai123456
@@ -51,20 +59,27 @@ DB_DATABASE=dify
 
 # Storage configuration
 # use for store upload files, private keys...
-# storage type: local, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
-STORAGE_TYPE=local
-STORAGE_LOCAL_PATH=storage
+# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase
+STORAGE_TYPE=opendal
+
+# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal
+OPENDAL_SCHEME=fs
+OPENDAL_FS_ROOT=storage
+
+# S3 Storage configuration
 S3_USE_AWS_MANAGED_IAM=false
-S3_ENDPOINT=https://your-bucket-name.storage.s3.clooudflare.com
+S3_ENDPOINT=https://your-bucket-name.storage.s3.cloudflare.com
 S3_BUCKET_NAME=your-bucket-name
 S3_ACCESS_KEY=your-access-key
 S3_SECRET_KEY=your-secret-key
 S3_REGION=your-region
+
 # Azure Blob Storage configuration
 AZURE_BLOB_ACCOUNT_NAME=your-account-name
 AZURE_BLOB_ACCOUNT_KEY=your-account-key
-AZURE_BLOB_CONTAINER_NAME=yout-container-name
+AZURE_BLOB_CONTAINER_NAME=your-container-name
 AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
+
 # Aliyun oss Storage configuration
 ALIYUN_OSS_BUCKET_NAME=your-bucket-name
 ALIYUN_OSS_ACCESS_KEY=your-access-key
@@ -74,8 +89,9 @@ ALIYUN_OSS_AUTH_VERSION=v1
 ALIYUN_OSS_REGION=your-region
 # Don't start with '/'. OSS doesn't support leading slash in object names.
 ALIYUN_OSS_PATH=your-path
+
 # Google Storage configuration
-GOOGLE_STORAGE_BUCKET_NAME=yout-bucket-name
+GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
 GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=your-google-service-account-json-base64-string
 
 # Tencent COS Storage configuration
@@ -120,8 +136,8 @@ SUPABASE_URL=your-server-url
 WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 
-
-# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase
+# Vector database configuration
+# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase
 VECTOR_STORE=weaviate
 
 # Weaviate configuration
@@ -234,6 +250,10 @@ ANALYTICDB_ACCOUNT=testaccount
 ANALYTICDB_PASSWORD=testpassword
 ANALYTICDB_NAMESPACE=dify
 ANALYTICDB_NAMESPACE_PASSWORD=difypassword
+ANALYTICDB_HOST=gp-test.aliyuncs.com
+ANALYTICDB_PORT=5432
+ANALYTICDB_MIN_CONNECTION=1
+ANALYTICDB_MAX_CONNECTION=5
 
 # OpenSearch configuration
 OPENSEARCH_HOST=127.0.0.1
@@ -268,6 +288,7 @@ VIKINGDB_SOCKET_TIMEOUT=30
 LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
 LINDORM_USERNAME=admin
 LINDORM_PASSWORD=admin
+USING_UGC_INDEX=False
 
 # OceanBase Vector configuration
 OCEANBASE_VECTOR_HOST=127.0.0.1
@@ -286,8 +307,7 @@ UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
 UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
 
 # Model configuration
-MULTIMODAL_SEND_IMAGE_FORMAT=base64
-MULTIMODAL_SEND_VIDEO_FORMAT=base64
+MULTIMODAL_SEND_FORMAT=base64
 PROMPT_GENERATION_MAX_TOKENS=512
 CODE_GENERATION_MAX_TOKENS=1024
 
@@ -320,6 +340,7 @@ NOTION_INTERNAL_SECRET=you-internal-secret
 ETL_TYPE=dify
 UNSTRUCTURED_API_URL=
 UNSTRUCTURED_API_KEY=
+SCARF_NO_ANALYTICS=true
 
 #ssrf
 SSRF_PROXY_HTTP_URL=
@@ -371,14 +392,17 @@ LOG_FILE_BACKUP_COUNT=5
 LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
 # Log Timezone
 LOG_TZ=UTC
+# Log format
+LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s
 
 # Indexing configuration
-INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=1000
+INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
 
 # Workflow runtime configuration
 WORKFLOW_MAX_EXECUTION_STEPS=500
 WORKFLOW_MAX_EXECUTION_TIME=1200
 WORKFLOW_CALL_MAX_DEPTH=5
+WORKFLOW_PARALLEL_DEPTH_LIMIT=3
 MAX_VARIABLE_SIZE=204800
 
 # App configuration
@@ -401,4 +425,9 @@ POSITION_PROVIDER_EXCLUDES=
 # Reset password token expiry minutes
 RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
 
-CREATE_TIDB_SERVICE_JOB_ENABLED=false
+CREATE_TIDB_SERVICE_JOB_ENABLED=false
+
+# Maximum number of submitted thread count in a ThreadPool for parallel node execution
+MAX_SUBMIT_COUNT=100
+# Lockout duration in seconds
+LOGIN_LOCKOUT_DURATION=86400

+ 97 - 0
api/.ruff.toml

@@ -0,0 +1,97 @@
+exclude = [
+    "migrations/*",
+]
+line-length = 120
+
+[format]
+quote-style = "double"
+
+[lint]
+preview = true
+select = [
+    "B", # flake8-bugbear rules
+    "C4", # flake8-comprehensions
+    "E", # pycodestyle E rules
+    "F", # pyflakes rules
+    "FURB", # refurb rules
+    "I", # isort rules
+    "N", # pep8-naming
+    "PT", # flake8-pytest-style rules
+    "PLC0208", # iteration-over-set
+    "PLC2801", # unnecessary-dunder-call
+    "PLC0414", # useless-import-alias
+    "PLE0604", # invalid-all-object
+    "PLE0605", # invalid-all-format
+    "PLR0402", # manual-from-import
+    "PLR1711", # useless-return
+    "PLR1714", # repeated-equality-comparison
+    "RUF013", # implicit-optional
+    "RUF019", # unnecessary-key-check
+    "RUF100", # unused-noqa
+    "RUF101", # redirected-noqa
+    "RUF200", # invalid-pyproject-toml
+    "RUF022", # unsorted-dunder-all
+    "S506", # unsafe-yaml-load
+    "SIM", # flake8-simplify rules
+    "TRY400", # error-instead-of-exception
+    "TRY401", # verbose-log-message
+    "UP", # pyupgrade rules
+    "W191", # tab-indentation
+    "W605", # invalid-escape-sequence
+]
+
+ignore = [
+    "E402", # module-import-not-at-top-of-file
+    "E711", # none-comparison
+    "E712", # true-false-comparison
+    "E721", # type-comparison
+    "E722", # bare-except
+    "E731", # lambda-assignment
+    "F821", # undefined-name
+    "F841", # unused-variable
+    "FURB113", # repeated-append
+    "FURB152", # math-constant
+    "UP007", # non-pep604-annotation
+    "UP032", # f-string
+    "UP045", # non-pep604-annotation-optional
+    "B005", # strip-with-multi-characters
+    "B006", # mutable-argument-default
+    "B007", # unused-loop-control-variable
+    "B026", # star-arg-unpacking-after-keyword-arg
+    "B903", # class-as-data-structure
+    "B904", # raise-without-from-inside-except
+    "B905", # zip-without-explicit-strict
+    "N806", # non-lowercase-variable-in-function
+    "N815", # mixed-case-variable-in-class-scope
+    "PT011", # pytest-raises-too-broad
+    "SIM102", # collapsible-if
+    "SIM103", # needless-bool
+    "SIM105", # suppressible-exception
+    "SIM107", # return-in-try-except-finally
+    "SIM108", # if-else-block-instead-of-if-exp
+    "SIM113", # enumerate-for-loop
+    "SIM117", # multiple-with-statements
+    "SIM210", # if-expr-with-true-false
+]
+
+[lint.per-file-ignores]
+"__init__.py" = [
+    "F401", # unused-import
+    "F811", # redefined-while-unused
+]
+"configs/*" = [
+    "N802", # invalid-function-name
+]
+"libs/gmpy2_pkcs10aep_cipher.py" = [
+    "N803", # invalid-argument-name
+]
+"tests/*" = [
+    "F811", # redefined-while-unused
+]
+
+[lint.pyflakes]
+allowed-unused-imports = [
+    "_pytest.monkeypatch",
+    "tests.integration_tests",
+    "tests.unit_tests",
+]

+ 14 - 13
api/Dockerfile

@@ -1,10 +1,10 @@
 # base image
-FROM python:3.10-slim-bookworm AS base
+FROM python:3.12-slim-bookworm AS base
 
 WORKDIR /app/api
 
 # Install Poetry
-ENV POETRY_VERSION=1.8.4
+ENV POETRY_VERSION=2.0.1
 
 # if you located in China, you can use aliyun mirror to speed up
 # RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/
@@ -48,16 +48,18 @@ ENV TZ=UTC
 
 WORKDIR /app/api
 
-RUN apt-get update \
-    && apt-get install -y --no-install-recommends curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
-    # if you located in China, you can use aliyun mirror to speed up
-    # && echo "deb http://mirrors.aliyun.com/debian testing main" > /etc/apt/sources.list \
-    && echo "deb http://deb.debian.org/debian testing main" > /etc/apt/sources.list \
-    && apt-get update \
-    # For Security
-    && apt-get install -y --no-install-recommends expat=2.6.3-2 libldap-2.5-0=2.5.18+dfsg-3+b1 perl=5.40.0-6 libsqlite3-0=3.46.1-1 zlib1g=1:1.3.dfsg+really1.3.1-1+b1 \
-    # install a chinese font to support the use of tools like matplotlib
-    && apt-get install -y fonts-noto-cjk \
+RUN \
+    apt-get update \
+    # Install dependencies
+    && apt-get install -y --no-install-recommends \
+        # basic environment
+        curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
+        # For Security
+        expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
+        # install a chinese font to support the use of tools like matplotlib
+        fonts-noto-cjk \
+        # install libmagic to support the use of python-magic guess MIMETYPE
+        libmagic1 \
     && apt-get autoremove -y \
     && rm -rf /var/lib/apt/lists/*
 
@@ -76,7 +78,6 @@ COPY . /app/api/
 COPY docker/entrypoint.sh /entrypoint.sh
 RUN chmod +x /entrypoint.sh
 
-
 ARG COMMIT_SHA
 ENV COMMIT_SHA=${COMMIT_SHA}
 

+ 14 - 13
api/README.md

@@ -18,12 +18,17 @@
    ```
 
 2. Copy `.env.example` to `.env`
+
+   ```bash
+   cp .env.example .env 
+   ```
 3. Generate a `SECRET_KEY` in the `.env` file.
 
+   bash for Linux
    ```bash for Linux
    sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
    ```
-
+   bash for Mac
    ```bash for Mac
    secret_key=$(openssl rand -base64 42)
    sed -i '' "/^SECRET_KEY=/c\\
@@ -32,21 +37,19 @@
 
 4. Create environment.
 
-   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. You can execute `poetry shell` to activate the environment.
-
-5. Install dependencies
+   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. First, you need to add the poetry shell plugin, if you don't have it already, in order to run in a virtual environment. [Note: Poetry shell is no longer a native command so you need to install the poetry plugin beforehand]
 
    ```bash
-   poetry env use 3.10
-   poetry install
+   poetry self add poetry-plugin-shell
    ```
+   
+   Then, you can execute `poetry shell` to activate the environment.
 
-   In case of contributors missing to update dependencies for `pyproject.toml`, you can perform the following shell instead.
+5. Install dependencies
 
    ```bash
-   poetry shell                                               # activate current environment
-   poetry add $(cat requirements.txt)           # install dependencies of production and update pyproject.toml
-   poetry add $(cat requirements-dev.txt) --group dev    # install dependencies of development and update pyproject.toml
+   poetry env use 3.12
+   poetry install
    ```
 
 6. Run migrate
@@ -82,7 +85,5 @@
 2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`
 
    ```bash
-   poetry run -C api bash dev/pytest/pytest_all_tests.sh
+   poetry run -P api bash dev/pytest/pytest_all_tests.sh
    ```
-
-

+ 24 - 91
api/app.py

@@ -1,108 +1,41 @@
 import os
+import sys
 
-from configs import dify_config
 
-if not dify_config.DEBUG:
-    from gevent import monkey
-
-    monkey.patch_all()
-
-    import grpc.experimental.gevent
-
-    grpc.experimental.gevent.init_gevent()
-
-import json
-import threading
-import time
-import warnings
-
-from flask import Response
-
-from app_factory import create_app
-
-# DO NOT REMOVE BELOW
-from events import event_handlers  # noqa: F401
-from extensions.ext_database import db
-
-# TODO: Find a way to avoid importing models here
-from models import account, dataset, model, source, task, tool, tools, web  # noqa: F401
-
-# DO NOT REMOVE ABOVE
-
-
-warnings.simplefilter("ignore", ResourceWarning)
-
-os.environ["TZ"] = "UTC"
-# windows platform not support tzset
-if hasattr(time, "tzset"):
-    time.tzset()
+def is_db_command():
+    if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
+        return True
+    return False
 
 
 # create app
-app = create_app()
-celery = app.extensions["celery"]
-
-if dify_config.TESTING:
-    print("App is running in TESTING mode")
-
-
-@app.after_request
-def after_request(response):
-    """Add Version headers to the response."""
-    response.set_cookie("remember_token", "", expires=0)
-    response.headers.add("X-Version", dify_config.CURRENT_VERSION)
-    response.headers.add("X-Env", dify_config.DEPLOY_ENV)
-    return response
-
-
-@app.route("/health")
-def health():
-    return Response(
-        json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
-        status=200,
-        content_type="application/json",
-    )
+if is_db_command():
+    from app_factory import create_migrations_app
 
+    app = create_migrations_app()
+else:
+    # It seems that JetBrains Python debugger does not work well with gevent,
+    # so we need to disable gevent in debug mode.
+    # If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
+    if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
+        from gevent import monkey  # type: ignore
 
-@app.route("/threads")
-def threads():
-    num_threads = threading.active_count()
-    threads = threading.enumerate()
+        # gevent
+        monkey.patch_all()
 
-    thread_list = []
-    for thread in threads:
-        thread_name = thread.name
-        thread_id = thread.ident
-        is_alive = thread.is_alive()
+        from grpc.experimental import gevent as grpc_gevent  # type: ignore
 
-        thread_list.append(
-            {
-                "name": thread_name,
-                "id": thread_id,
-                "is_alive": is_alive,
-            }
-        )
+        # grpc gevent
+        grpc_gevent.init_gevent()
 
-    return {
-        "pid": os.getpid(),
-        "thread_num": num_threads,
-        "threads": thread_list,
-    }
+        import psycogreen.gevent  # type: ignore
 
+        psycogreen.gevent.patch_psycopg()
 
-@app.route("/db-pool-stat")
-def pool_stat():
-    engine = db.engine
-    return {
-        "pid": os.getpid(),
-        "pool_size": engine.pool.size(),
-        "checked_in_connections": engine.pool.checkedin(),
-        "checked_out_connections": engine.pool.checkedout(),
-        "overflow_connections": engine.pool.overflow(),
-        "connection_timeout": engine.pool.timeout(),
-        "recycle_time": db.engine.pool._recycle,
-    }
+    from app_factory import create_app
 
+    app = create_app()
+    celery = app.extensions["celery"]
 
 if __name__ == "__main__":
     app.run(host="0.0.0.0", port=5001)

+ 75 - 152
api/app_factory.py

@@ -1,54 +1,14 @@
-import os
+import logging
+import time
 
 from configs import dify_config
-
-if not dify_config.DEBUG:
-    from gevent import monkey
-
-    monkey.patch_all()
-
-    import grpc.experimental.gevent
-
-    grpc.experimental.gevent.init_gevent()
-
-import json
-
-from flask import Flask, Response, request
-from flask_cors import CORS
-from werkzeug.exceptions import Unauthorized
-
-import contexts
-from commands import register_commands
-from configs import dify_config
-from extensions import (
-    ext_celery,
-    ext_code_based_extension,
-    ext_compress,
-    ext_database,
-    ext_hosting_provider,
-    ext_logging,
-    ext_login,
-    ext_mail,
-    ext_migrate,
-    ext_proxy_fix,
-    ext_redis,
-    ext_sentry,
-    ext_storage,
-)
-from extensions.ext_database import db
-from extensions.ext_login import login_manager
-from libs.passport import PassportService
-from services.account_service import AccountService
-
-
-class DifyApp(Flask):
-    pass
+from dify_app import DifyApp
 
 
 # ----------------------------
 # Application Factory Function
 # ----------------------------
-def create_flask_app_with_configs() -> Flask:
+def create_flask_app_with_configs() -> DifyApp:
     """
     create a raw flask app
     with configs loaded from .env file
@@ -56,123 +16,86 @@ def create_flask_app_with_configs() -> Flask:
     dify_app = DifyApp(__name__)
     dify_app.config.from_mapping(dify_config.model_dump())
 
-    # populate configs into system environment variables
-    for key, value in dify_app.config.items():
-        if isinstance(value, str):
-            os.environ[key] = value
-        elif isinstance(value, int | float | bool):
-            os.environ[key] = str(value)
-        elif value is None:
-            os.environ[key] = ""
-
     return dify_app
 
 
-def create_app() -> Flask:
+def create_app() -> DifyApp:
+    start_time = time.perf_counter()
     app = create_flask_app_with_configs()
-    app.secret_key = dify_config.SECRET_KEY
     initialize_extensions(app)
-    register_blueprints(app)
-    register_commands(app)
-
+    end_time = time.perf_counter()
+    if dify_config.DEBUG:
+        logging.info(f"Finished create_app ({round((end_time - start_time) * 1000, 2)} ms)")
     return app
 
 
-def initialize_extensions(app):
-    # Since the application instance is now created, pass it to each Flask
-    # extension instance to bind it to the Flask application instance (app)
-    ext_logging.init_app(app)
-    ext_compress.init_app(app)
-    ext_code_based_extension.init()
-    ext_database.init_app(app)
-    ext_migrate.init(app, db)
-    ext_redis.init_app(app)
-    ext_storage.init_app(app)
-    ext_celery.init_app(app)
-    ext_login.init_app(app)
-    ext_mail.init_app(app)
-    ext_hosting_provider.init_app(app)
-    ext_sentry.init_app(app)
-    ext_proxy_fix.init_app(app)
-
-
-# Flask-Login configuration
-@login_manager.request_loader
-def load_user_from_request(request_from_flask_login):
-    """Load user based on the request."""
-    if request.blueprint not in {"console", "inner_api"}:
-        return None
-    # Check if the user_id contains a dot, indicating the old format
-    auth_header = request.headers.get("Authorization", "")
-    if not auth_header:
-        auth_token = request.args.get("_token")
-        if not auth_token:
-            raise Unauthorized("Invalid Authorization token.")
-    else:
-        if " " not in auth_header:
-            raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
-        auth_scheme, auth_token = auth_header.split(None, 1)
-        auth_scheme = auth_scheme.lower()
-        if auth_scheme != "bearer":
-            raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
-
-    decoded = PassportService().verify(auth_token)
-    user_id = decoded.get("user_id")
-
-    logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
-    if logged_in_account:
-        contexts.tenant_id.set(logged_in_account.current_tenant_id)
-    return logged_in_account
-
-
-@login_manager.unauthorized_handler
-def unauthorized_handler():
-    """Handle unauthorized requests."""
-    return Response(
-        json.dumps({"code": "unauthorized", "message": "Unauthorized."}),
-        status=401,
-        content_type="application/json",
-    )
-
-
-# register blueprint routers
-def register_blueprints(app):
-    from controllers.console import bp as console_app_bp
-    from controllers.files import bp as files_bp
-    from controllers.inner_api import bp as inner_api_bp
-    from controllers.service_api import bp as service_api_bp
-    from controllers.web import bp as web_bp
-
-    CORS(
-        service_api_bp,
-        allow_headers=["Content-Type", "Authorization", "X-App-Code"],
-        methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+def initialize_extensions(app: DifyApp):
+    from extensions import (
+        ext_app_metrics,
+        ext_blueprints,
+        ext_celery,
+        ext_code_based_extension,
+        ext_commands,
+        ext_compress,
+        ext_database,
+        ext_hosting_provider,
+        ext_import_modules,
+        ext_logging,
+        ext_login,
+        ext_mail,
+        ext_migrate,
+        ext_proxy_fix,
+        ext_redis,
+        ext_sentry,
+        ext_set_secretkey,
+        ext_storage,
+        ext_timezone,
+        ext_warnings,
     )
-    app.register_blueprint(service_api_bp)
 
-    CORS(
-        web_bp,
-        resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
-        supports_credentials=True,
-        allow_headers=["Content-Type", "Authorization", "X-App-Code"],
-        methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
-        expose_headers=["X-Version", "X-Env"],
-    )
-
-    app.register_blueprint(web_bp)
-
-    CORS(
-        console_app_bp,
-        resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
-        supports_credentials=True,
-        allow_headers=["Content-Type", "Authorization"],
-        methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
-        expose_headers=["X-Version", "X-Env"],
-    )
-
-    app.register_blueprint(console_app_bp)
+    extensions = [
+        ext_timezone,
+        ext_logging,
+        ext_warnings,
+        ext_import_modules,
+        ext_set_secretkey,
+        ext_compress,
+        ext_code_based_extension,
+        ext_database,
+        ext_app_metrics,
+        ext_migrate,
+        ext_redis,
+        ext_storage,
+        ext_celery,
+        ext_login,
+        ext_mail,
+        ext_hosting_provider,
+        ext_sentry,
+        ext_proxy_fix,
+        ext_blueprints,
+        ext_commands,
+    ]
+    for ext in extensions:
+        short_name = ext.__name__.split(".")[-1]
+        is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
+        if not is_enabled:
+            if dify_config.DEBUG:
+                logging.info(f"Skipped {short_name}")
+            continue
+
+        start_time = time.perf_counter()
+        ext.init_app(app)
+        end_time = time.perf_counter()
+        if dify_config.DEBUG:
+            logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")
+
+
+def create_migrations_app():
+    app = create_flask_app_with_configs()
+    from extensions import ext_database, ext_migrate
 
-    CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"])
-    app.register_blueprint(files_bp)
+    # Initialize only required extensions
+    ext_database.init_app(app)
+    ext_migrate.init_app(app)
 
-    app.register_blueprint(inner_api_bp)
+    return app

+ 24 - 27
api/commands.py

@@ -159,8 +159,7 @@ def migrate_annotation_vector_database():
         try:
             # get apps info
             apps = (
-                db.session.query(App)
-                .filter(App.status == "normal")
+                App.query.filter(App.status == "normal")
                 .order_by(App.created_at.desc())
                 .paginate(page=page, per_page=50)
             )
@@ -259,7 +258,7 @@ def migrate_knowledge_vector_database():
     skipped_count = 0
     total_count = 0
     vector_type = dify_config.VECTOR_STORE
-    upper_colletion_vector_types = {
+    upper_collection_vector_types = {
         VectorType.MILVUS,
         VectorType.PGVECTOR,
         VectorType.RELYT,
@@ -267,7 +266,7 @@ def migrate_knowledge_vector_database():
         VectorType.ORACLE,
         VectorType.ELASTICSEARCH,
     }
-    lower_colletion_vector_types = {
+    lower_collection_vector_types = {
         VectorType.ANALYTICDB,
         VectorType.CHROMA,
         VectorType.MYSCALE,
@@ -285,8 +284,7 @@ def migrate_knowledge_vector_database():
     while True:
         try:
             datasets = (
-                db.session.query(Dataset)
-                .filter(Dataset.indexing_technique == "high_quality")
+                Dataset.query.filter(Dataset.indexing_technique == "high_quality")
                 .order_by(Dataset.created_at.desc())
                 .paginate(page=page, per_page=50)
             )
@@ -307,7 +305,7 @@ def migrate_knowledge_vector_database():
                         continue
                 collection_name = ""
                 dataset_id = dataset.id
-                if vector_type in upper_colletion_vector_types:
+                if vector_type in upper_collection_vector_types:
                     collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                 elif vector_type == VectorType.QDRANT:
                     if dataset.collection_binding_id:
@@ -323,7 +321,7 @@ def migrate_knowledge_vector_database():
                     else:
                         collection_name = Dataset.gen_collection_name_by_id(dataset_id)
 
-                elif vector_type in lower_colletion_vector_types:
+                elif vector_type in lower_collection_vector_types:
                     collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
                 else:
                     raise ValueError(f"Vector store {vector_type} is not supported.")
@@ -450,7 +448,8 @@ def convert_to_agent_apps():
                 if app_id not in proceeded_app_ids:
                     proceeded_app_ids.append(app_id)
                     app = db.session.query(App).filter(App.id == app_id).first()
-                    apps.append(app)
+                    if app is not None:
+                        apps.append(app)
 
             if len(apps) == 0:
                 break
@@ -555,14 +554,20 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str
     if language not in languages:
         language = "en-US"
 
-    name = name.strip()
+    # Validates name encoding for non-Latin characters.
+    name = name.strip().encode("utf-8").decode("utf-8") if name else None
 
     # generate random password
     new_password = secrets.token_urlsafe(16)
 
     # register account
-    account = RegisterService.register(email=email, name=account_name, password=new_password, language=language)
-
+    account = RegisterService.register(
+        email=email,
+        name=account_name,
+        password=new_password,
+        language=language,
+        create_workspace_required=False,
+    )
     TenantService.create_owner_tenant_if_not_exist(account, name)
 
     click.echo(
@@ -582,14 +587,14 @@ def upgrade_db():
             click.echo(click.style("Starting database migration.", fg="green"))
 
             # run db migration
-            import flask_migrate
+            import flask_migrate  # type: ignore
 
             flask_migrate.upgrade()
 
             click.echo(click.style("Database migration successful!", fg="green"))
 
         except Exception as e:
-            logging.exception(f"Database migration failed: {e}")
+            logging.exception("Failed to execute database migration")
         finally:
             lock.release()
     else:
@@ -620,6 +625,10 @@ where sites.id is null limit 1000"""
 
                 try:
                     app = db.session.query(App).filter(App.id == app_id).first()
+                    if not app:
+                        print(f"App {app_id} not found")
+                        continue
+
                     tenant = app.tenant
                     if tenant:
                         accounts = tenant.get_accounts()
@@ -633,22 +642,10 @@ where sites.id is null limit 1000"""
                 except Exception as e:
                     failed_app_ids.append(app_id)
                     click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
-                    logging.exception(f"Fix app related site missing issue failed, error: {e}")
+                    logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
                     continue
 
             if not processed_count:
                 break
 
     click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))
-
-
-def register_commands(app):
-    app.cli.add_command(reset_password)
-    app.cli.add_command(reset_email)
-    app.cli.add_command(reset_encrypt_key_pair)
-    app.cli.add_command(vdb_migrate)
-    app.cli.add_command(convert_to_agent_apps)
-    app.cli.add_command(add_qdrant_doc_id_index)
-    app.cli.add_command(create_tenant)
-    app.cli.add_command(upgrade_db)
-    app.cli.add_command(fix_app_site_missing)

+ 66 - 8
api/configs/app_config.py

@@ -1,11 +1,51 @@
-from pydantic_settings import SettingsConfigDict
+import logging
+from typing import Any
 
-from configs.deploy import DeploymentConfig
-from configs.enterprise import EnterpriseFeatureConfig
-from configs.extra import ExtraServiceConfig
-from configs.feature import FeatureConfig
-from configs.middleware import MiddlewareConfig
-from configs.packaging import PackagingInfo
+from pydantic.fields import FieldInfo
+from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
+
+from .deploy import DeploymentConfig
+from .enterprise import EnterpriseFeatureConfig
+from .extra import ExtraServiceConfig
+from .feature import FeatureConfig
+from .middleware import MiddlewareConfig
+from .packaging import PackagingInfo
+from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
+from .remote_settings_sources.apollo import ApolloSettingsSource
+
+logger = logging.getLogger(__name__)
+
+
+class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
+    def __init__(self, settings_cls: type[BaseSettings]):
+        super().__init__(settings_cls)
+
+    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+        raise NotImplementedError
+
+    def __call__(self) -> dict[str, Any]:
+        current_state = self.current_state
+        remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
+        if not remote_source_name:
+            return {}
+
+        remote_source: RemoteSettingsSource | None = None
+        match remote_source_name:
+            case RemoteSettingsSourceName.APOLLO:
+                remote_source = ApolloSettingsSource(current_state)
+            case _:
+                logger.warning(f"Unsupported remote source: {remote_source_name}")
+                return {}
+
+        d: dict[str, Any] = {}
+
+        for field_name, field in self.settings_cls.model_fields.items():
+            field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
+            field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
+            if field_value is not None:
+                d[field_key] = field_value
+
+        return d
 
 
 class DifyConfig(
@@ -19,6 +59,8 @@ class DifyConfig(
     MiddlewareConfig,
     # Extra service configs
     ExtraServiceConfig,
+    # Remote source configs
+    RemoteSettingsSourceConfig,
     # Enterprise feature configs
     # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
     EnterpriseFeatureConfig,
@@ -27,7 +69,6 @@ class DifyConfig(
         # read from dotenv format config file
         env_file=".env",
         env_file_encoding="utf-8",
-        frozen=True,
         # ignore extra attributes
         extra="ignore",
     )
@@ -36,3 +77,20 @@ class DifyConfig(
     # please consider to arrange it in the proper config group of existed or added
     # for better readability and maintainability.
     # Thanks for your concentration and consideration.
+
+    @classmethod
+    def settings_customise_sources(
+        cls,
+        settings_cls: type[BaseSettings],
+        init_settings: PydanticBaseSettingsSource,
+        env_settings: PydanticBaseSettingsSource,
+        dotenv_settings: PydanticBaseSettingsSource,
+        file_secret_settings: PydanticBaseSettingsSource,
+    ) -> tuple[PydanticBaseSettingsSource, ...]:
+        return (
+            init_settings,
+            env_settings,
+            RemoteSettingsSourceFactory(settings_cls),
+            dotenv_settings,
+            file_secret_settings,
+        )

+ 0 - 5
api/configs/deploy/__init__.py

@@ -17,11 +17,6 @@ class DeploymentConfig(BaseSettings):
         default=False,
     )
 
-    TESTING: bool = Field(
-        description="Enable testing mode for running automated tests",
-        default=False,
-    )
-
     EDITION: str = Field(
         description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
         default="SELF_HOSTED",

+ 69 - 21
api/configs/feature/__init__.py

@@ -146,7 +146,7 @@ class EndpointConfig(BaseSettings):
     )
 
     CONSOLE_WEB_URL: str = Field(
-        description="Base URL for the console web interface," "used for frontend references and CORS configuration",
+        description="Base URL for the console web interface, used for frontend references and CORS configuration",
         default="",
     )
 
@@ -239,7 +239,6 @@ class HttpConfig(BaseSettings):
     )
 
     @computed_field
-    @property
     def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")
 
@@ -250,7 +249,6 @@ class HttpConfig(BaseSettings):
     )
 
     @computed_field
-    @property
     def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
         return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
 
@@ -317,8 +315,8 @@ class HttpConfig(BaseSettings):
     )
 
     RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
-        description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug"
-        " to respect X-* headers to redirect clients",
+        description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
+        " when the app is behind a single trusted reverse proxy.",
         default=False,
     )
 
@@ -433,12 +431,28 @@ class WorkflowConfig(BaseSettings):
         default=5,
     )
 
+    WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
+        description="Maximum allowed depth for nested parallel executions",
+        default=3,
+    )
+
     MAX_VARIABLE_SIZE: PositiveInt = Field(
         description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
         default=200 * 1024,
     )
 
 
+class WorkflowNodeExecutionConfig(BaseSettings):
+    """
+    Configuration for workflow node execution
+    """
+
+    MAX_SUBMIT_COUNT: PositiveInt = Field(
+        description="Maximum number of submitted thread count in a ThreadPool for parallel node execution",
+        default=100,
+    )
+
+
 class AuthConfig(BaseSettings):
     """
     Configuration for authentication and OAuth
@@ -474,6 +488,21 @@ class AuthConfig(BaseSettings):
         default=60,
     )
 
+    REFRESH_TOKEN_EXPIRE_DAYS: PositiveFloat = Field(
+        description="Expiration time for refresh tokens in days",
+        default=30,
+    )
+
+    LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
+        description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
+        default=86400,
+    )
+
+    FORGOT_PASSWORD_LOCKOUT_DURATION: PositiveInt = Field(
+        description="Time (in seconds) a user must wait before retrying password reset after exceeding the rate limit.",
+        default=86400,
+    )
+
 
 class ModerationConfig(BaseSettings):
     """
@@ -582,7 +611,12 @@ class RagEtlConfig(BaseSettings):
 
     UNSTRUCTURED_API_KEY: Optional[str] = Field(
         description="API key for Unstructured.io service",
-        default=None,
+        default="",
+    )
+
+    SCARF_NO_ANALYTICS: Optional[str] = Field(
+        description="Whether to disable Scarf analytics in the Unstructured library.",
+        default="false",
     )
 
 
@@ -616,6 +650,11 @@ class DataSetConfig(BaseSettings):
         default=False,
     )
 
+    PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
+        description="Interval in days for message cleanup operations - plan: sandbox",
+        default=30,
+    )
+
 
 class WorkspaceConfig(BaseSettings):
     """
@@ -635,18 +674,18 @@ class IndexingConfig(BaseSettings):
 
     INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
         description="Maximum token length for text segmentation during indexing",
-        default=1000,
+        default=4000,
     )
 
-
-class VisionFormatConfig(BaseSettings):
-    MULTIMODAL_SEND_IMAGE_FORMAT: Literal["base64", "url"] = Field(
-        description="Format for sending images in multimodal contexts ('base64' or 'url'), default is base64",
-        default="base64",
+    CHILD_CHUNKS_PREVIEW_NUMBER: PositiveInt = Field(
+        description="Maximum number of child chunks to preview",
+        default=50,
     )
 
-    MULTIMODAL_SEND_VIDEO_FORMAT: Literal["base64", "url"] = Field(
-        description="Format for sending videos in multimodal contexts ('base64' or 'url'), default is base64",
+
+class MultiModalTransferConfig(BaseSettings):
+    MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
+        description="Format for sending files in multimodal contexts ('base64' or 'url'), default is base64",
         default="base64",
     )
 
@@ -689,27 +728,27 @@ class PositionConfig(BaseSettings):
         default="",
     )
 
-    @computed_field
+    @property
     def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]
 
-    @computed_field
+    @property
     def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}
 
-    @computed_field
+    @property
     def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}
 
-    @computed_field
+    @property
     def POSITION_TOOL_PINS_LIST(self) -> list[str]:
         return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]
 
-    @computed_field
+    @property
     def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}
 
-    @computed_field
+    @property
     def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
         return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
 
@@ -741,6 +780,13 @@ class LoginConfig(BaseSettings):
     )
 
 
+class AccountConfig(BaseSettings):
+    ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
+        description="Duration in minutes for which an account deletion token remains valid",
+        default=5,
+    )
+
+
 class FeatureConfig(
     # place the configs in alphabet order
     AppExecutionConfig,
@@ -752,21 +798,23 @@ class FeatureConfig(
     FileAccessConfig,
     FileUploadConfig,
     HttpConfig,
-    VisionFormatConfig,
     InnerAPIConfig,
     IndexingConfig,
     LoggingConfig,
     MailConfig,
     ModelLoadBalanceConfig,
     ModerationConfig,
+    MultiModalTransferConfig,
     PositionConfig,
     RagEtlConfig,
     SecurityConfig,
     ToolConfig,
     UpdateConfig,
     WorkflowConfig,
+    WorkflowNodeExecutionConfig,
     WorkspaceConfig,
     LoginConfig,
+    AccountConfig,
     # hosted services config
     HostedServiceConfig,
     CeleryBeatConfig,

+ 35 - 2
api/configs/feature/hosted_service/__init__.py

@@ -1,9 +1,40 @@
 from typing import Optional
 
-from pydantic import Field, NonNegativeInt
+from pydantic import Field, NonNegativeInt, computed_field
 from pydantic_settings import BaseSettings
 
 
+class HostedCreditConfig(BaseSettings):
+    HOSTED_MODEL_CREDIT_CONFIG: str = Field(
+        description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
+        default="",
+    )
+
+    def get_model_credits(self, model_name: str) -> int:
+        """
+        Get credit value for a specific model name.
+        Returns 1 if model is not found in configuration (default credit).
+
+        :param model_name: The name of the model to search for
+        :return: The credit value for the model
+        """
+        if not self.HOSTED_MODEL_CREDIT_CONFIG:
+            return 1
+
+        try:
+            credit_map = dict(
+                item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
+            )
+
+            # Search for matching model pattern
+            for pattern, credit in credit_map.items():
+                if pattern.strip() == model_name:
+                    return int(credit)
+            return 1  # Default quota if no match found
+        except (ValueError, AttributeError):
+            return 1  # Return default quota if parsing fails
+
+
 class HostedOpenAiConfig(BaseSettings):
     """
     Configuration for hosted OpenAI service
@@ -181,7 +212,7 @@ class HostedFetchAppTemplateConfig(BaseSettings):
     """
 
     HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
-        description="Mode for fetching app templates: remote, db, or builtin" " default to remote,",
+        description="Mode for fetching app templates: remote, db, or builtin; default is remote.",
         default="remote",
     )
 
@@ -202,5 +233,7 @@ class HostedServiceConfig(
     HostedZhipuAIConfig,
     # moderation
     HostedModerationConfig,
+    # credit config
+    HostedCreditConfig,
 ):
     pass

+ 54 - 42
api/configs/middleware/__init__.py

@@ -1,54 +1,69 @@
-from typing import Any, Optional
+from typing import Any, Literal, Optional
 from urllib.parse import quote_plus
 
 from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
 from pydantic_settings import BaseSettings
 
-from configs.middleware.cache.redis_config import RedisConfig
-from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
-from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
-from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
-from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageConfig
-from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
-from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
-from configs.middleware.storage.oci_storage_config import OCIStorageConfig
-from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig
-from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
-from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
-from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
-from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig
-from configs.middleware.vdb.chroma_config import ChromaConfig
-from configs.middleware.vdb.couchbase_config import CouchbaseConfig
-from configs.middleware.vdb.elasticsearch_config import ElasticsearchConfig
-from configs.middleware.vdb.lindorm_config import LindormConfig
-from configs.middleware.vdb.milvus_config import MilvusConfig
-from configs.middleware.vdb.myscale_config import MyScaleConfig
-from configs.middleware.vdb.oceanbase_config import OceanBaseVectorConfig
-from configs.middleware.vdb.opensearch_config import OpenSearchConfig
-from configs.middleware.vdb.oracle_config import OracleConfig
-from configs.middleware.vdb.pgvector_config import PGVectorConfig
-from configs.middleware.vdb.pgvectors_config import PGVectoRSConfig
-from configs.middleware.vdb.qdrant_config import QdrantConfig
-from configs.middleware.vdb.relyt_config import RelytConfig
-from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig
-from configs.middleware.vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
-from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
-from configs.middleware.vdb.upstash_config import UpstashConfig
-from configs.middleware.vdb.vikingdb_config import VikingDBConfig
-from configs.middleware.vdb.weaviate_config import WeaviateConfig
+from .cache.redis_config import RedisConfig
+from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
+from .storage.amazon_s3_storage_config import S3StorageConfig
+from .storage.azure_blob_storage_config import AzureBlobStorageConfig
+from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
+from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
+from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
+from .storage.oci_storage_config import OCIStorageConfig
+from .storage.opendal_storage_config import OpenDALStorageConfig
+from .storage.supabase_storage_config import SupabaseStorageConfig
+from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
+from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
+from .vdb.analyticdb_config import AnalyticdbConfig
+from .vdb.baidu_vector_config import BaiduVectorDBConfig
+from .vdb.chroma_config import ChromaConfig
+from .vdb.couchbase_config import CouchbaseConfig
+from .vdb.elasticsearch_config import ElasticsearchConfig
+from .vdb.lindorm_config import LindormConfig
+from .vdb.milvus_config import MilvusConfig
+from .vdb.myscale_config import MyScaleConfig
+from .vdb.oceanbase_config import OceanBaseVectorConfig
+from .vdb.opensearch_config import OpenSearchConfig
+from .vdb.oracle_config import OracleConfig
+from .vdb.pgvector_config import PGVectorConfig
+from .vdb.pgvectors_config import PGVectoRSConfig
+from .vdb.qdrant_config import QdrantConfig
+from .vdb.relyt_config import RelytConfig
+from .vdb.tencent_vector_config import TencentVectorDBConfig
+from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
+from .vdb.tidb_vector_config import TiDBVectorConfig
+from .vdb.upstash_config import UpstashConfig
+from .vdb.vikingdb_config import VikingDBConfig
+from .vdb.weaviate_config import WeaviateConfig
 
 
 class StorageConfig(BaseSettings):
-    STORAGE_TYPE: str = Field(
+    STORAGE_TYPE: Literal[
+        "opendal",
+        "s3",
+        "aliyun-oss",
+        "azure-blob",
+        "baidu-obs",
+        "google-storage",
+        "huawei-obs",
+        "oci-storage",
+        "tencent-cos",
+        "volcengine-tos",
+        "supabase",
+        "local",
+    ] = Field(
         description="Type of storage to use."
-        " Options: 'local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', 'huawei-obs', "
-        "'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'local'.",
-        default="local",
+        " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
+        "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
+        default="opendal",
     )
 
     STORAGE_LOCAL_PATH: str = Field(
         description="Path for local storage when STORAGE_TYPE is set to 'local'.",
         default="storage",
+        deprecated=True,
     )
 
 
@@ -73,7 +88,7 @@ class KeywordStoreConfig(BaseSettings):
     )
 
 
-class DatabaseConfig:
+class DatabaseConfig(BaseSettings):
     DB_HOST: str = Field(
         description="Hostname or IP address of the database server.",
         default="localhost",
@@ -115,7 +130,6 @@ class DatabaseConfig:
     )
 
     @computed_field
-    @property
     def SQLALCHEMY_DATABASE_URI(self) -> str:
         db_extras = (
             f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
@@ -153,7 +167,6 @@ class DatabaseConfig:
     )
 
     @computed_field
-    @property
     def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
         return {
             "pool_size": self.SQLALCHEMY_POOL_SIZE,
@@ -191,7 +204,6 @@ class CeleryConfig(DatabaseConfig):
     )
 
     @computed_field
-    @property
     def CELERY_RESULT_BACKEND(self) -> str | None:
         return (
             "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
@@ -199,7 +211,6 @@ class CeleryConfig(DatabaseConfig):
             else self.CELERY_BROKER_URL
         )
 
-    @computed_field
     @property
     def BROKER_USE_SSL(self) -> bool:
         return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
@@ -235,6 +246,7 @@ class MiddlewareConfig(
     GoogleCloudStorageConfig,
     HuaweiCloudOBSStorageConfig,
     OCIStorageConfig,
+    OpenDALStorageConfig,
     S3StorageConfig,
     SupabaseStorageConfig,
     TencentCloudCOSStorageConfig,

+ 15 - 0
api/configs/middleware/cache/redis_config.py

@@ -68,3 +68,18 @@ class RedisConfig(BaseSettings):
         description="Socket timeout in seconds for Redis Sentinel connections",
         default=0.1,
     )
+
+    REDIS_USE_CLUSTERS: bool = Field(
+        description="Enable Redis Clusters mode for high availability",
+        default=False,
+    )
+
+    REDIS_CLUSTERS: Optional[str] = Field(
+        description="Comma-separated list of Redis Clusters nodes (host:port)",
+        default=None,
+    )
+
+    REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
+        description="Password for Redis Clusters authentication (if required)",
+        default=None,
+    )

+ 3 - 2
api/configs/middleware/storage/baidu_obs_storage_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class BaiduOBSStorageConfig(BaseModel):
+class BaiduOBSStorageConfig(BaseSettings):
     """
     Configuration settings for Baidu Object Storage Service (OBS)
     """

+ 3 - 2
api/configs/middleware/storage/huawei_obs_storage_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class HuaweiCloudOBSStorageConfig(BaseModel):
+class HuaweiCloudOBSStorageConfig(BaseSettings):
     """
     Configuration settings for Huawei Cloud Object Storage Service (OBS)
     """

+ 9 - 0
api/configs/middleware/storage/opendal_storage_config.py

@@ -0,0 +1,9 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class OpenDALStorageConfig(BaseSettings):
+    OPENDAL_SCHEME: str = Field(
+        default="fs",
+        description="OpenDAL scheme.",
+    )

+ 3 - 2
api/configs/middleware/storage/supabase_storage_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class SupabaseStorageConfig(BaseModel):
+class SupabaseStorageConfig(BaseSettings):
     """
     Configuration settings for Supabase Object Storage Service
     """

+ 3 - 2
api/configs/middleware/storage/volcengine_tos_storage_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class VolcengineTOSStorageConfig(BaseModel):
+class VolcengineTOSStorageConfig(BaseSettings):
     """
     Configuration settings for Volcengine Tinder Object Storage (TOS)
     """

+ 11 - 2
api/configs/middleware/vdb/analyticdb_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
 
 
-class AnalyticdbConfig(BaseModel):
+class AnalyticdbConfig(BaseSettings):
     """
     Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
     Refer to the following documentation for details on obtaining credentials:
@@ -40,3 +41,11 @@ class AnalyticdbConfig(BaseModel):
         description="The password for accessing the specified namespace within the AnalyticDB instance"
         " (if namespace feature is enabled).",
     )
+    ANALYTICDB_HOST: Optional[str] = Field(
+        default=None, description="The host of the AnalyticDB instance you want to connect to."
+    )
+    ANALYTICDB_PORT: PositiveInt = Field(
+        default=5432, description="The port of the AnalyticDB instance you want to connect to."
+    )
+    ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(default=1, description="Min connection of the AnalyticDB database.")
+    ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(default=5, description="Max connection of the AnalyticDB database.")

+ 3 - 2
api/configs/middleware/vdb/couchbase_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class CouchbaseConfig(BaseModel):
+class CouchbaseConfig(BaseSettings):
     """
     Couchbase configs
     """

+ 11 - 0
api/configs/middleware/vdb/lindorm_config.py

@@ -21,3 +21,14 @@ class LindormConfig(BaseSettings):
         description="Lindorm password",
         default=None,
     )
+    DEFAULT_INDEX_TYPE: Optional[str] = Field(
+        description="Lindorm Vector Index Type, hnsw or flat is available in dify",
+        default="hnsw",
+    )
+    DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
+        description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
+    )
+    USING_UGC_INDEX: Optional[bool] = Field(
+        description="When enabled, the UGC index stores indexes of the same type in a single index while still allowing separate retrieval.",
+        default=False,
+    )

+ 6 - 0
api/configs/middleware/vdb/milvus_config.py

@@ -33,3 +33,9 @@ class MilvusConfig(BaseSettings):
         description="Name of the Milvus database to connect to (default is 'default')",
         default="default",
     )
+
+    MILVUS_ENABLE_HYBRID_SEARCH: bool = Field(
+        description="Enable hybrid search features (requires Milvus >= 2.5.0). Set to false for compatibility with "
+        "older versions",
+        default=True,
+    )

+ 3 - 2
api/configs/middleware/vdb/myscale_config.py

@@ -1,7 +1,8 @@
-from pydantic import BaseModel, Field, PositiveInt
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
 
 
-class MyScaleConfig(BaseModel):
+class MyScaleConfig(BaseSettings):
     """
     Configuration settings for MyScale vector database
     """

+ 3 - 2
api/configs/middleware/vdb/vikingdb_config.py

@@ -1,9 +1,10 @@
 from typing import Optional
 
-from pydantic import BaseModel, Field
+from pydantic import Field
+from pydantic_settings import BaseSettings
 
 
-class VikingDBConfig(BaseModel):
+class VikingDBConfig(BaseSettings):
     """
     Configuration for connecting to Volcengine VikingDB.
     Refer to the following documentation for details on obtaining credentials:

+ 1 - 1
api/configs/packaging/__init__.py

@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
 
     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="0.11.0",
+        default="0.15.3",
     )
 
     COMMIT_SHA: str = Field(

+ 17 - 0
api/configs/remote_settings_sources/__init__.py

@@ -0,0 +1,17 @@
+from typing import Optional
+
+from pydantic import Field
+
+from .apollo import ApolloSettingsSourceInfo
+from .base import RemoteSettingsSource
+from .enums import RemoteSettingsSourceName
+
+
+class RemoteSettingsSourceConfig(ApolloSettingsSourceInfo):
+    REMOTE_SETTINGS_SOURCE_NAME: RemoteSettingsSourceName | str = Field(
+        description="name of remote config source",
+        default="",
+    )
+
+
+__all__ = ["RemoteSettingsSource", "RemoteSettingsSourceConfig", "RemoteSettingsSourceName"]

+ 55 - 0
api/configs/remote_settings_sources/apollo/__init__.py

@@ -0,0 +1,55 @@
+from collections.abc import Mapping
+from typing import Any, Optional
+
+from pydantic import Field
+from pydantic.fields import FieldInfo
+from pydantic_settings import BaseSettings
+
+from configs.remote_settings_sources.base import RemoteSettingsSource
+
+from .client import ApolloClient
+
+
+class ApolloSettingsSourceInfo(BaseSettings):
+    """
+    Apollo remote settings source configuration
+    """
+
+    APOLLO_APP_ID: Optional[str] = Field(
+        description="apollo app_id",
+        default=None,
+    )
+
+    APOLLO_CLUSTER: Optional[str] = Field(
+        description="apollo cluster",
+        default=None,
+    )
+
+    APOLLO_CONFIG_URL: Optional[str] = Field(
+        description="apollo config url",
+        default=None,
+    )
+
+    APOLLO_NAMESPACE: Optional[str] = Field(
+        description="apollo namespace",
+        default=None,
+    )
+
+
+class ApolloSettingsSource(RemoteSettingsSource):
+    def __init__(self, configs: Mapping[str, Any]):
+        self.client = ApolloClient(
+            app_id=configs["APOLLO_APP_ID"],
+            cluster=configs["APOLLO_CLUSTER"],
+            config_url=configs["APOLLO_CONFIG_URL"],
+            start_hot_update=False,
+            _notification_map={configs["APOLLO_NAMESPACE"]: -1},
+        )
+        self.namespace = configs["APOLLO_NAMESPACE"]
+        self.remote_configs = self.client.get_all_dicts(self.namespace)
+
+    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+        if not isinstance(self.remote_configs, dict):
+            raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
+        field_value = self.remote_configs.get(field_name)
+        return field_value, field_name, False

+ 304 - 0
api/configs/remote_settings_sources/apollo/client.py

@@ -0,0 +1,304 @@
+import hashlib
+import json
+import logging
+import os
+import threading
+import time
+from collections.abc import Mapping
+from pathlib import Path
+
+from .python_3x import http_request, makedirs_wrapper
+from .utils import (
+    CONFIGURATIONS,
+    NAMESPACE_NAME,
+    NOTIFICATION_ID,
+    get_value_from_dict,
+    init_ip,
+    no_key_cache_key,
+    signature,
+    url_encode_wrapper,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class ApolloClient:
+    def __init__(
+        self,
+        config_url,
+        app_id,
+        cluster="default",
+        secret="",
+        start_hot_update=True,
+        change_listener=None,
+        _notification_map=None,
+    ):
+        # Core routing parameters
+        self.config_url = config_url
+        self.cluster = cluster
+        self.app_id = app_id
+
+        # Non-core parameters
+        self.ip = init_ip()
+        self.secret = secret
+
+        # Check the parameter variables
+
+        # Private control variables
+        self._cycle_time = 5
+        self._stopping = False
+        self._cache = {}
+        self._no_key = {}
+        self._hash = {}
+        self._pull_timeout = 75
+        self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
+        self._long_poll_thread = None
+        self._change_listener = change_listener  # "add" "delete" "update"
+        if _notification_map is None:
+            _notification_map = {"application": -1}
+        self._notification_map = _notification_map
+        self.last_release_key = None
+        # Private startup method
+        self._path_checker()
+        if start_hot_update:
+            self._start_hot_update()
+
+        # start the heartbeat thread
+        heartbeat = threading.Thread(target=self._heart_beat)
+        heartbeat.daemon = True
+        heartbeat.start()
+
+    def get_json_from_net(self, namespace="application"):
+        url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
+            self.config_url, self.app_id, self.cluster, namespace, "", self.ip
+        )
+        try:
+            code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
+            if code == 200:
+                if not body:
+                    logger.error(f"get_json_from_net load configs failed, body is {body}")
+                    return None
+                data = json.loads(body)
+                data = data["configurations"]
+                return_data = {CONFIGURATIONS: data}
+                return return_data
+            else:
+                return None
+        except Exception:
+            logger.exception("an error occurred in get_json_from_net")
+            return None
+
+    def get_value(self, key, default_val=None, namespace="application"):
+        try:
+            # read memory configuration
+            namespace_cache = self._cache.get(namespace)
+            val = get_value_from_dict(namespace_cache, key)
+            if val is not None:
+                return val
+
+            no_key = no_key_cache_key(namespace, key)
+            if no_key in self._no_key:
+                return default_val
+
+            # read the network configuration
+            namespace_data = self.get_json_from_net(namespace)
+            val = get_value_from_dict(namespace_data, key)
+            if val is not None:
+                self._update_cache_and_file(namespace_data, namespace)
+                return val
+
+            # read the file configuration
+            namespace_cache = self._get_local_cache(namespace)
+            val = get_value_from_dict(namespace_cache, key)
+            if val is not None:
+                self._update_cache_and_file(namespace_cache, namespace)
+                return val
+
+            # If all of them are not obtained, the default value is returned
+            # and the local cache is set to None
+            self._set_local_cache_none(namespace, key)
+            return default_val
+        except Exception:
+            logger.exception("get_value has error, [key is %s], [namespace is %s]", key, namespace)
+            return default_val
+
+    # Mark the key of a namespace as missing, without caching a default val,
+    # to ensure the real-time correctness of later calls.
+    # If a default val were cached here and the caller later passed a
+    # different default val, the stale cached value would be returned.
+    def _set_local_cache_none(self, namespace, key):
+        no_key = no_key_cache_key(namespace, key)
+        self._no_key[no_key] = key
+
+    def _start_hot_update(self):
+        self._long_poll_thread = threading.Thread(target=self._listener)
+        # Run the long-poll thread as a daemon so it exits automatically
+        # when the main thread terminates.
+        self._long_poll_thread.daemon = True
+        self._long_poll_thread.start()
+
+    def stop(self):
+        self._stopping = True
+        logger.info("Stopping listener...")
+
+    # Invoke the registered change listener; log and swallow any exception it raises.
+    def _call_listener(self, namespace, old_kv, new_kv):
+        if self._change_listener is None:
+            return
+        if old_kv is None:
+            old_kv = {}
+        if new_kv is None:
+            new_kv = {}
+        try:
+            for key in old_kv:
+                new_value = new_kv.get(key)
+                old_value = old_kv.get(key)
+                if new_value is None:
+                    # If new_value is None, the key and its value were deleted.
+                    self._change_listener("delete", namespace, key, old_value)
+                    continue
+                if new_value != old_value:
+                    self._change_listener("update", namespace, key, new_value)
+                    continue
+            for key in new_kv:
+                new_value = new_kv.get(key)
+                old_value = old_kv.get(key)
+                if old_value is None:
+                    self._change_listener("add", namespace, key, new_value)
+        except BaseException as e:
+            logger.warning(str(e))
+
+    def _path_checker(self):
+        if not os.path.isdir(self._cache_file_path):
+            makedirs_wrapper(self._cache_file_path)
+
+    # update the local cache and file cache
+    def _update_cache_and_file(self, namespace_data, namespace="application"):
+        # update the local cache
+        self._cache[namespace] = namespace_data
+        # update the file cache
+        new_string = json.dumps(namespace_data)
+        new_hash = hashlib.md5(new_string.encode("utf-8")).hexdigest()
+        if self._hash.get(namespace) == new_hash:
+            pass
+        else:
+            file_path = Path(self._cache_file_path) / f"{self.app_id}_configuration_{namespace}.txt"
+            file_path.write_text(new_string)
+            self._hash[namespace] = new_hash
+
+    # get the configuration from the local file
+    def _get_local_cache(self, namespace="application"):
+        cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
+        if os.path.isfile(cache_file_path):
+            with open(cache_file_path) as f:
+                result = json.loads(f.readline())
+            return result
+        return {}
+
+    def _long_poll(self):
+        notifications = []
+        for key in self._cache:
+            namespace_data = self._cache[key]
+            notification_id = -1
+            if NOTIFICATION_ID in namespace_data:
+                notification_id = self._cache[key][NOTIFICATION_ID]
+            notifications.append({NAMESPACE_NAME: key, NOTIFICATION_ID: notification_id})
+        try:
+            # if the length is 0 it is returned directly
+            if len(notifications) == 0:
+                return
+            url = "{}/notifications/v2".format(self.config_url)
+            params = {
+                "appId": self.app_id,
+                "cluster": self.cluster,
+                "notifications": json.dumps(notifications, ensure_ascii=False),
+            }
+            param_str = url_encode_wrapper(params)
+            url = url + "?" + param_str
+            code, body = http_request(url, self._pull_timeout, headers=self._sign_headers(url))
+            http_code = code
+            if http_code == 304:
+                logger.debug("No change, loop...")
+                return
+            if http_code == 200:
+                if not body:
+                    logger.error(f"_long_poll load configs failed,body is {body}")
+                    return
+                data = json.loads(body)
+                for entry in data:
+                    namespace = entry[NAMESPACE_NAME]
+                    n_id = entry[NOTIFICATION_ID]
+                    logger.info("%s has changes: notificationId=%d", namespace, n_id)
+                    self._get_net_and_set_local(namespace, n_id, call_change=True)
+                    return
+            else:
+                logger.warning("Sleep...")
+        except Exception as e:
+            logger.warning(str(e))
+
+    def _get_net_and_set_local(self, namespace, n_id, call_change=False):
+        namespace_data = self.get_json_from_net(namespace)
+        if not namespace_data:
+            return
+        namespace_data[NOTIFICATION_ID] = n_id
+        old_namespace = self._cache.get(namespace)
+        self._update_cache_and_file(namespace_data, namespace)
+        if self._change_listener is not None and call_change and old_namespace:
+            old_kv = old_namespace.get(CONFIGURATIONS)
+            new_kv = namespace_data.get(CONFIGURATIONS)
+            self._call_listener(namespace, old_kv, new_kv)
+
+    def _listener(self):
+        logger.info("start long_poll")
+        while not self._stopping:
+            self._long_poll()
+            time.sleep(self._cycle_time)
+        logger.info("stopped, long_poll")
+
+    # Build authentication signature headers when a secret is configured.
+    def _sign_headers(self, url: str) -> Mapping[str, str]:
+        headers: dict[str, str] = {}
+        if self.secret == "":
+            return headers
+        uri = url[len(self.config_url) : len(url)]
+        time_unix_now = str(int(round(time.time() * 1000)))
+        headers["Authorization"] = "Apollo " + self.app_id + ":" + signature(time_unix_now, uri, self.secret)
+        headers["Timestamp"] = time_unix_now
+        return headers
+
+    def _heart_beat(self):
+        while not self._stopping:
+            for namespace in self._notification_map:
+                self._do_heart_beat(namespace)
+            time.sleep(60 * 10)  # heartbeat every 10 minutes
+
+    def _do_heart_beat(self, namespace):
+        url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
+        try:
+            code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
+            if code == 200:
+                if not body:
+                    logger.error(f"_do_heart_beat load configs failed,body is {body}")
+                    return None
+                data = json.loads(body)
+                if self.last_release_key == data["releaseKey"]:
+                    return None
+                self.last_release_key = data["releaseKey"]
+                data = data["configurations"]
+                self._update_cache_and_file(data, namespace)
+            else:
+                return None
+        except Exception:
+            logger.exception("an error occurred in _do_heart_beat")
+            return None
+
+    def get_all_dicts(self, namespace):
+        namespace_data = self._cache.get(namespace)
+        if namespace_data is None:
+            net_namespace_data = self.get_json_from_net(namespace)
+            if not net_namespace_data:
+                return namespace_data
+            namespace_data = net_namespace_data.get(CONFIGURATIONS)
+            if namespace_data:
+                self._update_cache_and_file(namespace_data, namespace)
+        return namespace_data

+ 41 - 0
api/configs/remote_settings_sources/apollo/python_3x.py

@@ -0,0 +1,41 @@
+import logging
+import os
+import ssl
+import urllib.request
+from urllib import parse
+from urllib.error import HTTPError
+
+# Create an SSL context that allows for a lower level of security
+ssl_context = ssl.create_default_context()
+ssl_context.set_ciphers("HIGH:!DH:!aNULL")
+ssl_context.check_hostname = False
+ssl_context.verify_mode = ssl.CERT_NONE
+
+# Create an opener object and pass in a custom SSL context
+opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=ssl_context))
+
+urllib.request.install_opener(opener)
+
+logger = logging.getLogger(__name__)
+
+
+def http_request(url, timeout, headers={}):
+    try:
+        request = urllib.request.Request(url, headers=headers)
+        res = urllib.request.urlopen(request, timeout=timeout)
+        body = res.read().decode("utf-8")
+        return res.code, body
+    except HTTPError as e:
+        if e.code == 304:
+            logger.warning("http_request error,code is 304, maybe you should check secret")
+            return 304, None
+        logger.warning("http_request error,code is %d, msg is %s", e.code, e.msg)
+        raise e
+
+
+def url_encode(params):
+    return parse.urlencode(params)
+
+
+def makedirs_wrapper(path):
+    os.makedirs(path, exist_ok=True)

+ 51 - 0
api/configs/remote_settings_sources/apollo/utils.py

@@ -0,0 +1,51 @@
+import hashlib
+import socket
+
+from .python_3x import url_encode
+
+# define constants
+CONFIGURATIONS = "configurations"
+NOTIFICATION_ID = "notificationId"
+NAMESPACE_NAME = "namespaceName"
+
+
+# Build the HMAC-SHA1 signature over the timestamp and URI using the secret.
+def signature(timestamp, uri, secret):
+    import base64
+    import hmac
+
+    string_to_sign = "" + timestamp + "\n" + uri
+    hmac_code = hmac.new(secret.encode(), string_to_sign.encode(), hashlib.sha1).digest()
+    return base64.b64encode(hmac_code).decode()
+
+
+def url_encode_wrapper(params):
+    return url_encode(params)
+
+
+def no_key_cache_key(namespace, key):
+    return "{}{}{}".format(namespace, len(namespace), key)
+
+
+# Return the value for the key from the namespace cache, or None if absent.
+def get_value_from_dict(namespace_cache, key):
+    if namespace_cache:
+        kv_data = namespace_cache.get(CONFIGURATIONS)
+        if kv_data is None:
+            return None
+        if key in kv_data:
+            return kv_data[key]
+    return None
+
+
+def init_ip():
+    ip = ""
+    s = None
+    try:
+        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        s.connect(("8.8.8.8", 53))
+        ip = s.getsockname()[0]
+    finally:
+        if s:
+            s.close()
+    return ip

+ 15 - 0
api/configs/remote_settings_sources/base.py

@@ -0,0 +1,15 @@
+from collections.abc import Mapping
+from typing import Any
+
+from pydantic.fields import FieldInfo
+
+
+class RemoteSettingsSource:
+    def __init__(self, configs: Mapping[str, Any]):
+        pass
+
+    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+        raise NotImplementedError
+
+    def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
+        return value

+ 5 - 0
api/configs/remote_settings_sources/enums.py

@@ -0,0 +1,5 @@
+from enum import StrEnum
+
+
+class RemoteSettingsSourceName(StrEnum):
+    APOLLO = "apollo"

+ 3 - 3
api/constants/__init__.py

@@ -14,11 +14,11 @@ AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
 
 
 if dify_config.ETL_TYPE == "Unstructured":
-    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls"]
-    DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"]
+    DOCUMENT_EXTENSIONS.extend(("doc", "docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
     if dify_config.UNSTRUCTURED_API_URL:
         DOCUMENT_EXTENSIONS.append("ppt")
     DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
 else:
-    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
+    DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
     DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])

+ 2 - 0
api/constants/languages.py

@@ -17,6 +17,8 @@ language_timezone_mapping = {
     "hi-IN": "Asia/Kolkata",
     "tr-TR": "Europe/Istanbul",
     "fa-IR": "Asia/Tehran",
+    "sl-SI": "Europe/Ljubljana",
+    "th-TH": "Asia/Bangkok",
 }
 
 languages = list(language_timezone_mapping.keys())

+ 2 - 1
api/constants/model_template.py

@@ -1,8 +1,9 @@
 import json
+from collections.abc import Mapping
 
 from models.model import AppMode
 
-default_app_templates = {
+default_app_templates: Mapping[AppMode, Mapping] = {
     # workflow default mode
     AppMode.WORKFLOW: {
         "app": {

+ 5 - 0
api/controllers/common/errors.py

@@ -4,3 +4,8 @@ from werkzeug.exceptions import HTTPException
 class FilenameNotExistsError(HTTPException):
     code = 400
     description = "The specified filename does not exist."
+
+
+class RemoteFileUploadError(HTTPException):
+    code = 400
+    description = "Error uploading remote file."

+ 1 - 1
api/controllers/common/fields.py

@@ -1,4 +1,4 @@
-from flask_restful import fields
+from flask_restful import fields  # type: ignore
 
 parameters__system_parameters = {
     "image_file_size_limit": fields.Integer,

+ 27 - 0
api/controllers/common/helpers.py

@@ -1,12 +1,32 @@
 import mimetypes
 import os
+import platform
 import re
 import urllib.parse
+import warnings
 from collections.abc import Mapping
 from typing import Any
 from uuid import uuid4
 
 import httpx
+
+try:
+    import magic
+except ImportError:
+    if platform.system() == "Windows":
+        warnings.warn(
+            "To use python-magic guess MIMETYPE, you need to run `pip install python-magic-bin`", stacklevel=2
+        )
+    elif platform.system() == "Darwin":
+        warnings.warn("To use python-magic guess MIMETYPE, you need to run `brew install libmagic`", stacklevel=2)
+    elif platform.system() == "Linux":
+        warnings.warn(
+            "To use python-magic guess MIMETYPE, you need to run `sudo apt-get install libmagic1`", stacklevel=2
+        )
+    else:
+        warnings.warn("To use python-magic guess MIMETYPE, you need to install `libmagic`", stacklevel=2)
+    magic = None  # type: ignore
+
 from pydantic import BaseModel
 
 from configs import dify_config
@@ -47,6 +67,13 @@ def guess_file_info_from_response(response: httpx.Response):
         # If guessing fails, use Content-Type from response headers
         mimetype = response.headers.get("Content-Type", "application/octet-stream")
 
+    # Use python-magic to guess MIME type if still unknown or generic
+    if mimetype == "application/octet-stream" and magic is not None:
+        try:
+            mimetype = magic.from_buffer(response.content[:1024], mime=True)
+        except magic.MagicException:
+            pass
+
     extension = os.path.splitext(filename)[1]
 
     # Ensure filename has an extension

+ 95 - 5
api/controllers/console/__init__.py

@@ -2,6 +2,26 @@ from flask import Blueprint
 
 from libs.external_api import ExternalApi
 
+from .app.app_import import AppImportApi, AppImportConfirmApi
+from .explore.audio import ChatAudioApi, ChatTextApi
+from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
+from .explore.conversation import (
+    ConversationApi,
+    ConversationListApi,
+    ConversationPinApi,
+    ConversationRenameApi,
+    ConversationUnPinApi,
+)
+from .explore.message import (
+    MessageFeedbackApi,
+    MessageListApi,
+    MessageMoreLikeThisApi,
+    MessageSuggestedQuestionApi,
+)
+from .explore.workflow import (
+    InstalledAppWorkflowRunApi,
+    InstalledAppWorkflowTaskStopApi,
+)
 from .files import FileApi, FilePreviewApi, FileSupportTypeApi
 from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
 
@@ -17,6 +37,10 @@ api.add_resource(FileSupportTypeApi, "/files/support-type")
 api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
 api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
 
+# Import App
+api.add_resource(AppImportApi, "/apps/imports")
+api.add_resource(AppImportConfirmApi, "/apps/imports/<string:import_id>/confirm")
+
 # Import other controllers
 from . import admin, apikey, extension, feature, ping, setup, version
 
@@ -61,15 +85,81 @@ from .datasets import (
 
 # Import explore controllers
 from .explore import (
-    audio,
-    completion,
-    conversation,
     installed_app,
-    message,
     parameter,
     recommended_app,
     saved_message,
-    workflow,
+)
+
+# Explore Audio
+api.add_resource(ChatAudioApi, "/installed-apps/<uuid:installed_app_id>/audio-to-text", endpoint="installed_app_audio")
+api.add_resource(ChatTextApi, "/installed-apps/<uuid:installed_app_id>/text-to-audio", endpoint="installed_app_text")
+
+# Explore Completion
+api.add_resource(
+    CompletionApi, "/installed-apps/<uuid:installed_app_id>/completion-messages", endpoint="installed_app_completion"
+)
+api.add_resource(
+    CompletionStopApi,
+    "/installed-apps/<uuid:installed_app_id>/completion-messages/<string:task_id>/stop",
+    endpoint="installed_app_stop_completion",
+)
+api.add_resource(
+    ChatApi, "/installed-apps/<uuid:installed_app_id>/chat-messages", endpoint="installed_app_chat_completion"
+)
+api.add_resource(
+    ChatStopApi,
+    "/installed-apps/<uuid:installed_app_id>/chat-messages/<string:task_id>/stop",
+    endpoint="installed_app_stop_chat_completion",
+)
+
+# Explore Conversation
+api.add_resource(
+    ConversationRenameApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/name",
+    endpoint="installed_app_conversation_rename",
+)
+api.add_resource(
+    ConversationListApi, "/installed-apps/<uuid:installed_app_id>/conversations", endpoint="installed_app_conversations"
+)
+api.add_resource(
+    ConversationApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>",
+    endpoint="installed_app_conversation",
+)
+api.add_resource(
+    ConversationPinApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/pin",
+    endpoint="installed_app_conversation_pin",
+)
+api.add_resource(
+    ConversationUnPinApi,
+    "/installed-apps/<uuid:installed_app_id>/conversations/<uuid:c_id>/unpin",
+    endpoint="installed_app_conversation_unpin",
+)
+
+
+# Explore Message
+api.add_resource(MessageListApi, "/installed-apps/<uuid:installed_app_id>/messages", endpoint="installed_app_messages")
+api.add_resource(
+    MessageFeedbackApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/feedbacks",
+    endpoint="installed_app_message_feedback",
+)
+api.add_resource(
+    MessageMoreLikeThisApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/more-like-this",
+    endpoint="installed_app_more_like_this",
+)
+api.add_resource(
+    MessageSuggestedQuestionApi,
+    "/installed-apps/<uuid:installed_app_id>/messages/<uuid:message_id>/suggested-questions",
+    endpoint="installed_app_suggested_question",
+)
+# Explore Workflow
+api.add_resource(InstalledAppWorkflowRunApi, "/installed-apps/<uuid:installed_app_id>/workflows/run")
+api.add_resource(
+    InstalledAppWorkflowTaskStopApi, "/installed-apps/<uuid:installed_app_id>/workflows/tasks/<string:task_id>/stop"
 )
 
 # Import tag controllers

+ 3 - 3
api/controllers/console/admin.py

@@ -1,7 +1,7 @@
 from functools import wraps
 
 from flask import request
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 from werkzeug.exceptions import NotFound, Unauthorized
 
 from configs import dify_config
@@ -31,7 +31,7 @@ def admin_required(view):
         if auth_scheme != "bearer":
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
 
-        if dify_config.ADMIN_API_KEY != auth_token:
+        if auth_token != dify_config.ADMIN_API_KEY:
             raise Unauthorized("API key is invalid.")
 
         return view(*args, **kwargs)
@@ -56,7 +56,7 @@ class InsertExploreAppListApi(Resource):
 
         app = App.query.filter(App.id == args["app_id"]).first()
         if not app:
-            raise NotFound(f'App \'{args["app_id"]}\' is not found')
+            raise NotFound(f"App '{args['app_id']}' is not found")
 
         site = app.site
         if not site:

+ 14 - 9
api/controllers/console/apikey.py

@@ -1,5 +1,7 @@
-import flask_restful
-from flask_login import current_user
+from typing import Any
+
+import flask_restful  # type: ignore
+from flask_login import current_user  # type: ignore
 from flask_restful import Resource, fields, marshal_with
 from werkzeug.exceptions import Forbidden
 
@@ -35,14 +37,15 @@ def _get_resource(resource_id, tenant_id, resource_model):
 class BaseApiKeyListResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]
 
-    resource_type = None
-    resource_model = None
-    resource_id_field = None
-    token_prefix = None
+    resource_type: str | None = None
+    resource_model: Any = None
+    resource_id_field: str | None = None
+    token_prefix: str | None = None
     max_keys = 10
 
     @marshal_with(api_key_list)
     def get(self, resource_id):
+        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
         keys = (
@@ -54,6 +57,7 @@ class BaseApiKeyListResource(Resource):
 
     @marshal_with(api_key_fields)
     def post(self, resource_id):
+        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
         if not current_user.is_editor:
@@ -86,11 +90,12 @@ class BaseApiKeyListResource(Resource):
 class BaseApiKeyResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]
 
-    resource_type = None
-    resource_model = None
-    resource_id_field = None
+    resource_type: str | None = None
+    resource_model: Any = None
+    resource_id_field: str | None = None
 
     def delete(self, resource_id, api_key_id):
+        assert self.resource_id_field is not None, "resource_id_field must be set"
         resource_id = str(resource_id)
         api_key_id = str(api_key_id)
         _get_resource(resource_id, current_user.current_tenant_id, self.resource_model)

+ 1 - 1
api/controllers/console/app/advanced_prompt_template.py

@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, setup_required

+ 1 - 1
api/controllers/console/app/agent.py

@@ -1,4 +1,4 @@
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

+ 3 - 3
api/controllers/console/app/annotation.py

@@ -1,6 +1,6 @@
 from flask import request
-from flask_login import current_user
-from flask_restful import Resource, marshal, marshal_with, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, marshal, marshal_with, reqparse  # type: ignore
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api
@@ -110,7 +110,7 @@ class AnnotationListApi(Resource):
 
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)
-        keyword = request.args.get("keyword", default=None, type=str)
+        keyword = request.args.get("keyword", default="", type=str)
 
         app_id = str(app_id)
         annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_id, page, limit, keyword)

+ 31 - 65
api/controllers/console/app/app.py

@@ -1,7 +1,10 @@
 import uuid
+from typing import cast
 
-from flask_login import current_user
-from flask_restful import Resource, inputs, marshal, marshal_with, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, inputs, marshal, marshal_with, reqparse  # type: ignore
+from sqlalchemy import select
+from sqlalchemy.orm import Session
 from werkzeug.exceptions import BadRequest, Forbidden, abort
 
 from controllers.console import api
@@ -9,16 +12,19 @@ from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import (
     account_initialization_required,
     cloud_edition_billing_resource_check,
+    enterprise_license_required,
     setup_required,
 )
 from core.ops.ops_trace_manager import OpsTraceManager
+from extensions.ext_database import db
 from fields.app_fields import (
     app_detail_fields,
     app_detail_fields_with_site,
     app_pagination_fields,
 )
 from libs.login import login_required
-from services.app_dsl_service import AppDslService
+from models import Account, App
+from services.app_dsl_service import AppDslService, ImportMode
 from services.app_service import AppService
 
 ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
@@ -28,6 +34,7 @@ class AppListApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
+    @enterprise_license_required
     def get(self):
         """Get app list"""
 
@@ -50,12 +57,13 @@ class AppListApi(Resource):
         )
         parser.add_argument("name", type=str, location="args", required=False)
         parser.add_argument("tag_ids", type=uuid_list, location="args", required=False)
+        parser.add_argument("is_created_by_me", type=inputs.boolean, location="args", required=False)
 
         args = parser.parse_args()
 
         # get app list
         app_service = AppService()
-        app_pagination = app_service.get_paginate_apps(current_user.current_tenant_id, args)
+        app_pagination = app_service.get_paginate_apps(current_user.id, current_user.current_tenant_id, args)
         if not app_pagination:
             return {"data": [], "total": 0, "page": 1, "limit": 20, "has_more": False}
 
@@ -90,65 +98,11 @@ class AppListApi(Resource):
         return app, 201
 
 
-class AppImportApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @marshal_with(app_detail_fields_with_site)
-    @cloud_edition_billing_resource_check("apps")
-    def post(self):
-        """Import app"""
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("data", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=str, location="json")
-        parser.add_argument("description", type=str, location="json")
-        parser.add_argument("icon_type", type=str, location="json")
-        parser.add_argument("icon", type=str, location="json")
-        parser.add_argument("icon_background", type=str, location="json")
-        args = parser.parse_args()
-
-        app = AppDslService.import_and_create_new_app(
-            tenant_id=current_user.current_tenant_id, data=args["data"], args=args, account=current_user
-        )
-
-        return app, 201
-
-
-class AppImportFromUrlApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @marshal_with(app_detail_fields_with_site)
-    @cloud_edition_billing_resource_check("apps")
-    def post(self):
-        """Import app from url"""
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("url", type=str, required=True, nullable=False, location="json")
-        parser.add_argument("name", type=str, location="json")
-        parser.add_argument("description", type=str, location="json")
-        parser.add_argument("icon", type=str, location="json")
-        parser.add_argument("icon_background", type=str, location="json")
-        args = parser.parse_args()
-
-        app = AppDslService.import_and_create_new_app_from_url(
-            tenant_id=current_user.current_tenant_id, url=args["url"], args=args, account=current_user
-        )
-
-        return app, 201
-
-
 class AppApi(Resource):
     @setup_required
     @login_required
     @account_initialization_required
+    @enterprise_license_required
     @get_app_model
     @marshal_with(app_detail_fields_with_site)
     def get(self, app_model):
@@ -221,10 +175,24 @@ class AppCopyApi(Resource):
         parser.add_argument("icon_background", type=str, location="json")
         args = parser.parse_args()
 
-        data = AppDslService.export_dsl(app_model=app_model, include_secret=True)
-        app = AppDslService.import_and_create_new_app(
-            tenant_id=current_user.current_tenant_id, data=data, args=args, account=current_user
-        )
+        with Session(db.engine) as session:
+            import_service = AppDslService(session)
+            yaml_content = import_service.export_dsl(app_model=app_model, include_secret=True)
+            account = cast(Account, current_user)
+            result = import_service.import_app(
+                account=account,
+                import_mode=ImportMode.YAML_CONTENT.value,
+                yaml_content=yaml_content,
+                name=args.get("name"),
+                description=args.get("description"),
+                icon_type=args.get("icon_type"),
+                icon=args.get("icon"),
+                icon_background=args.get("icon_background"),
+            )
+            session.commit()
+
+            stmt = select(App).where(App.id == result.app_id)
+            app = session.scalar(stmt)
 
         return app, 201
 
@@ -365,8 +333,6 @@ class AppTraceApi(Resource):
 
 
 api.add_resource(AppListApi, "/apps")
-api.add_resource(AppImportApi, "/apps/import")
-api.add_resource(AppImportFromUrlApi, "/apps/import/url")
 api.add_resource(AppApi, "/apps/<uuid:app_id>")
 api.add_resource(AppCopyApi, "/apps/<uuid:app_id>/copy")
 api.add_resource(AppExportApi, "/apps/<uuid:app_id>/export")

+ 90 - 0
api/controllers/console/app/app_import.py

@@ -0,0 +1,90 @@
+from typing import cast
+
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from sqlalchemy.orm import Session
+from werkzeug.exceptions import Forbidden
+
+from controllers.console.wraps import (
+    account_initialization_required,
+    setup_required,
+)
+from extensions.ext_database import db
+from fields.app_fields import app_import_fields
+from libs.login import login_required
+from models import Account
+from services.app_dsl_service import AppDslService, ImportStatus
+
+
+class AppImportApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @marshal_with(app_import_fields)
+    def post(self):
+        # Check user role first
+        if not current_user.is_editor:
+            raise Forbidden()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("mode", type=str, required=True, location="json")
+        parser.add_argument("yaml_content", type=str, location="json")
+        parser.add_argument("yaml_url", type=str, location="json")
+        parser.add_argument("name", type=str, location="json")
+        parser.add_argument("description", type=str, location="json")
+        parser.add_argument("icon_type", type=str, location="json")
+        parser.add_argument("icon", type=str, location="json")
+        parser.add_argument("icon_background", type=str, location="json")
+        parser.add_argument("app_id", type=str, location="json")
+        args = parser.parse_args()
+
+        # Create service with session
+        with Session(db.engine) as session:
+            import_service = AppDslService(session)
+            # Import app
+            account = cast(Account, current_user)
+            result = import_service.import_app(
+                account=account,
+                import_mode=args["mode"],
+                yaml_content=args.get("yaml_content"),
+                yaml_url=args.get("yaml_url"),
+                name=args.get("name"),
+                description=args.get("description"),
+                icon_type=args.get("icon_type"),
+                icon=args.get("icon"),
+                icon_background=args.get("icon_background"),
+                app_id=args.get("app_id"),
+            )
+            session.commit()
+
+        # Return appropriate status code based on result
+        status = result.status
+        if status == ImportStatus.FAILED.value:
+            return result.model_dump(mode="json"), 400
+        elif status == ImportStatus.PENDING.value:
+            return result.model_dump(mode="json"), 202
+        return result.model_dump(mode="json"), 200
+
+
+class AppImportConfirmApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @marshal_with(app_import_fields)
+    def post(self, import_id):
+        # Check user role first
+        if not current_user.is_editor:
+            raise Forbidden()
+
+        # Create service with session
+        with Session(db.engine) as session:
+            import_service = AppDslService(session)
+            # Confirm import
+            account = cast(Account, current_user)
+            result = import_service.confirm_import(import_id=import_id, account=account)
+            session.commit()
+
+        # Return appropriate status code based on result
+        if result.status == ImportStatus.FAILED.value:
+            return result.model_dump(mode="json"), 400
+        return result.model_dump(mode="json"), 200

+ 10 - 6
api/controllers/console/app/audio.py

@@ -1,7 +1,7 @@
 import logging
 
 from flask import request
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 from werkzeug.exceptions import InternalServerError
 
 import services
@@ -22,7 +22,7 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
 from core.model_runtime.errors.invoke import InvokeError
 from libs.login import login_required
-from models.model import AppMode
+from models import App, AppMode
 from services.audio_service import AudioService
 from services.errors.audio import (
     AudioTooLargeServiceError,
@@ -70,7 +70,7 @@ class ChatMessageAudioApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle post request to ChatMessageAudioApi")
             raise InternalServerError()
 
 
@@ -79,7 +79,7 @@ class ChatMessageTextApi(Resource):
     @login_required
     @account_initialization_required
     @get_app_model
-    def post(self, app_model):
+    def post(self, app_model: App):
         from werkzeug.exceptions import InternalServerError
 
         try:
@@ -98,9 +98,13 @@ class ChatMessageTextApi(Resource):
                 and app_model.workflow.features_dict
             ):
                 text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
+                if text_to_speech is None:
+                    raise ValueError("TTS is not enabled")
                 voice = args.get("voice") or text_to_speech.get("voice")
             else:
                 try:
+                    if app_model.app_model_config is None:
+                        raise ValueError("AppModelConfig not found")
                     voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
                 except Exception:
                     voice = None
@@ -128,7 +132,7 @@ class ChatMessageTextApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle post request to ChatMessageTextApi")
             raise InternalServerError()
 
 
@@ -170,7 +174,7 @@ class TextModesApi(Resource):
         except ValueError as e:
             raise e
         except Exception as e:
-            logging.exception(f"internal server error, {str(e)}.")
+            logging.exception("Failed to handle get request to TextModesApi")
             raise InternalServerError()
 
 

+ 4 - 5
api/controllers/console/app/completion.py

@@ -1,7 +1,7 @@
 import logging
 
-import flask_login
-from flask_restful import Resource, reqparse
+import flask_login  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
 from werkzeug.exceptions import InternalServerError, NotFound
 
 import services
@@ -20,7 +20,6 @@ from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpErr
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from core.errors.error import (
-    AppInvokeQuotaExceededError,
     ModelCurrentlyNotSupportError,
     ProviderTokenNotInitError,
     QuotaExceededError,
@@ -76,7 +75,7 @@ class CompletionMessageApi(Resource):
             raise ProviderModelCurrentlyNotSupportError()
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except (ValueError, AppInvokeQuotaExceededError) as e:
+        except ValueError as e:
             raise e
         except Exception as e:
             logging.exception("internal server error.")
@@ -141,7 +140,7 @@ class ChatMessageApi(Resource):
             raise InvokeRateLimitHttpError(ex.description)
         except InvokeError as e:
             raise CompletionRequestError(e.description)
-        except (ValueError, AppInvokeQuotaExceededError) as e:
+        except ValueError as e:
             raise e
         except Exception as e:
             logging.exception("internal server error.")

+ 10 - 9
api/controllers/console/app/conversation.py

@@ -1,9 +1,9 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
-import pytz
-from flask_login import current_user
-from flask_restful import Resource, marshal_with, reqparse
-from flask_restful.inputs import int_range
+import pytz
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_restful.inputs import int_range  # type: ignore
 from sqlalchemy import func, or_
 from sqlalchemy.orm import joinedload
 from werkzeug.exceptions import Forbidden, NotFound
@@ -77,8 +77,9 @@ class CompletionConversationApi(Resource):
 
             query = query.where(Conversation.created_at < end_datetime_utc)
 
+        # FIXME: remove the "# type: ignore" comments below once the SQLAlchemy relationship typing is fixed
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(
+            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":
@@ -222,7 +223,7 @@ class ChatConversationApi(Resource):
                     query = query.where(Conversation.created_at <= end_datetime_utc)
 
         if args["annotation_status"] == "annotated":
-            query = query.options(joinedload(Conversation.message_annotations)).join(
+            query = query.options(joinedload(Conversation.message_annotations)).join(  # type: ignore
                 MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id
             )
         elif args["annotation_status"] == "not_annotated":
@@ -234,7 +235,7 @@ class ChatConversationApi(Resource):
 
         if args["message_count_gte"] and args["message_count_gte"] >= 1:
             query = (
-                query.options(joinedload(Conversation.messages))
+                query.options(joinedload(Conversation.messages))  # type: ignore
                 .join(Message, Message.conversation_id == Conversation.id)
                 .group_by(Conversation.id)
                 .having(func.count(Message.id) >= args["message_count_gte"])
@@ -314,7 +315,7 @@ def _get_conversation(app_model, conversation_id):
         raise NotFound("Conversation Not Exists.")
 
     if not conversation.read_at:
-        conversation.read_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        conversation.read_at = datetime.now(UTC).replace(tzinfo=None)
         conversation.read_account_id = current_user.id
         db.session.commit()
 

+ 1 - 1
api/controllers/console/app/conversation_variables.py

@@ -1,4 +1,4 @@
-from flask_restful import Resource, marshal_with, reqparse
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 

+ 2 - 2
api/controllers/console/app/generator.py

@@ -1,7 +1,7 @@
 import os
 
-from flask_login import current_user
-from flask_restful import Resource, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.error import (

+ 3 - 3
api/controllers/console/app/message.py

@@ -1,8 +1,8 @@
 import logging
 
-from flask_login import current_user
-from flask_restful import Resource, fields, marshal_with, reqparse
-from flask_restful.inputs import int_range
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
+from flask_restful.inputs import int_range  # type: ignore
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 from controllers.console import api

+ 11 - 6
api/controllers/console/app/model_config.py

@@ -1,8 +1,9 @@
 import json
+from typing import cast
 
 from flask import request
-from flask_login import current_user
-from flask_restful import Resource
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -26,7 +27,9 @@ class ModelConfigResource(Resource):
         """Modify app model config"""
         # validate config
         model_configuration = AppModelConfigService.validate_configuration(
-            tenant_id=current_user.current_tenant_id, config=request.json, app_mode=AppMode.value_of(app_model.mode)
+            tenant_id=current_user.current_tenant_id,
+            config=cast(dict, request.json),
+            app_mode=AppMode.value_of(app_model.mode),
         )
 
         new_app_model_config = AppModelConfig(
@@ -38,9 +41,11 @@ class ModelConfigResource(Resource):
 
         if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
             # get original app model config
-            original_app_model_config: AppModelConfig = (
+            original_app_model_config = (
                 db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first()
             )
+            if original_app_model_config is None:
+                raise ValueError("Original app model config not found")
             agent_mode = original_app_model_config.agent_mode_dict
             # decrypt agent tool parameters if it's secret-input
             parameter_map = {}
@@ -65,7 +70,7 @@ class ModelConfigResource(Resource):
                         provider_type=agent_tool_entity.provider_type,
                         identity_id=f"AGENT.{app_model.id}",
                     )
-                except Exception as e:
+                except Exception:
                     continue
 
                 # get decrypted parameters
@@ -97,7 +102,7 @@ class ModelConfigResource(Resource):
                             app_id=app_model.id,
                             agent_tool=agent_tool_entity,
                         )
-                    except Exception as e:
+                    except Exception:
                         continue
 
                 manager = ToolParameterConfigurationManager(

+ 6 - 5
api/controllers/console/app/ops_trace.py

@@ -1,4 +1,5 @@
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
+from werkzeug.exceptions import BadRequest
 
 from controllers.console import api
 from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
@@ -26,7 +27,7 @@ class TraceAppConfigApi(Resource):
                 return {"has_not_configured": True}
             return trace_config
         except Exception as e:
-            raise e
+            raise BadRequest(str(e))
 
     @setup_required
     @login_required
@@ -48,7 +49,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigCheckError()
             return result
         except Exception as e:
-            raise e
+            raise BadRequest(str(e))
 
     @setup_required
     @login_required
@@ -68,7 +69,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigNotExist()
             return {"result": "success"}
         except Exception as e:
-            raise e
+            raise BadRequest(str(e))
 
     @setup_required
     @login_required
@@ -85,7 +86,7 @@ class TraceAppConfigApi(Resource):
                 raise TracingConfigNotExist()
             return {"result": "success"}
         except Exception as e:
-            raise e
+            raise BadRequest(str(e))
 
 
 api.add_resource(TraceAppConfigApi, "/apps/<uuid:app_id>/trace-config")

+ 6 - 6
api/controllers/console/app/site.py

@@ -1,7 +1,7 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 
-from flask_login import current_user
-from flask_restful import Resource, marshal_with, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
 from werkzeug.exceptions import Forbidden, NotFound
 
 from constants.languages import supported_language
@@ -50,7 +50,7 @@ class AppSite(Resource):
         if not current_user.is_editor:
             raise Forbidden()
 
-        site = db.session.query(Site).filter(Site.app_id == app_model.id).one_or_404()
+        site = Site.query.filter(Site.app_id == app_model.id).one_or_404()
 
         for attr_name in [
             "title",
@@ -75,7 +75,7 @@ class AppSite(Resource):
                 setattr(site, attr_name, value)
 
         site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return site
@@ -99,7 +99,7 @@ class AppSiteAccessTokenReset(Resource):
 
         site.code = Site.generate_code(16)
         site.updated_by = current_user.id
-        site.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        site.updated_at = datetime.now(UTC).replace(tzinfo=None)
         db.session.commit()
 
         return site

+ 3 - 4
api/controllers/console/app/statistic.py

@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user
-from flask_restful import Resource, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
@@ -273,8 +273,7 @@ FROM
             messages m
             ON c.id = m.conversation_id
         WHERE
-            c.override_model_configs IS NULL
-            AND c.app_id = :app_id"""
+            c.app_id = :app_id"""
         arg_dict = {"tz": account.timezone, "app_id": app_model.id}
 
         timezone = pytz.timezone(account.timezone)

+ 46 - 32
api/controllers/console/app/workflow.py

@@ -2,10 +2,11 @@ import json
 import logging
 
 from flask import abort, request
-from flask_restful import Resource, marshal_with, reqparse
+from flask_restful import Resource, inputs, marshal_with, reqparse  # type: ignore
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
 
 import services
+from configs import dify_config
 from controllers.console import api
 from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.wraps import get_app_model
@@ -13,14 +14,13 @@ from controllers.console.wraps import account_initialization_required, setup_req
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import InvokeFrom
 from factories import variable_factory
-from fields.workflow_fields import workflow_fields
+from fields.workflow_fields import workflow_fields, workflow_pagination_fields
 from fields.workflow_run_fields import workflow_run_node_execution_fields
 from libs import helper
 from libs.helper import TimestampField, uuid_value
 from libs.login import current_user, login_required
 from models import App
 from models.model import AppMode
-from services.app_dsl_service import AppDslService
 from services.app_generate_service import AppGenerateService
 from services.errors.app import WorkflowHashNotEqualError
 from services.workflow_service import WorkflowService
@@ -101,11 +101,11 @@ class DraftWorkflowApi(Resource):
         try:
             environment_variables_list = args.get("environment_variables") or []
             environment_variables = [
-                variable_factory.build_variable_from_mapping(obj) for obj in environment_variables_list
+                variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
             ]
             conversation_variables_list = args.get("conversation_variables") or []
             conversation_variables = [
-                variable_factory.build_variable_from_mapping(obj) for obj in conversation_variables_list
+                variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
             ]
             workflow = workflow_service.sync_draft_workflow(
                 app_model=app_model,
@@ -126,31 +126,6 @@ class DraftWorkflowApi(Resource):
         }
 
 
-class DraftWorkflowImportApi(Resource):
-    @setup_required
-    @login_required
-    @account_initialization_required
-    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
-    @marshal_with(workflow_fields)
-    def post(self, app_model: App):
-        """
-        Import draft workflow
-        """
-        # The role of the current user in the ta table must be admin, owner, or editor
-        if not current_user.is_editor:
-            raise Forbidden()
-
-        parser = reqparse.RequestParser()
-        parser.add_argument("data", type=str, required=True, nullable=False, location="json")
-        args = parser.parse_args()
-
-        workflow = AppDslService.import_and_overwrite_workflow(
-            app_model=app_model, data=args["data"], account=current_user
-        )
-
-        return workflow
-
-
 class AdvancedChatDraftWorkflowRunApi(Resource):
     @setup_required
     @login_required
@@ -408,7 +383,7 @@ class DefaultBlockConfigApi(Resource):
         filters = None
         if args.get("q"):
             try:
-                filters = json.loads(args.get("q"))
+                filters = json.loads(args.get("q", ""))
             except json.JSONDecodeError:
                 raise ValueError("Invalid filters")
 
@@ -452,8 +427,46 @@ class ConvertToWorkflowApi(Resource):
         }
 
 
+class WorkflowConfigApi(Resource):
+    """Resource for workflow configuration."""
+
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
+    def get(self, app_model: App):
+        return {
+            "parallel_depth_limit": dify_config.WORKFLOW_PARALLEL_DEPTH_LIMIT,
+        }
+
+
+class PublishedAllWorkflowApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
+    @marshal_with(workflow_pagination_fields)
+    def get(self, app_model: App):
+        """
+        Get published workflows
+        """
+        if not current_user.is_editor:
+            raise Forbidden()
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
+        parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
+        args = parser.parse_args()
+        page = args.get("page")
+        limit = args.get("limit")
+        workflow_service = WorkflowService()
+        workflows, has_more = workflow_service.get_all_published_workflow(app_model=app_model, page=page, limit=limit)
+
+        return {"items": workflows, "page": page, "limit": limit, "has_more": has_more}
+
+
 api.add_resource(DraftWorkflowApi, "/apps/<uuid:app_id>/workflows/draft")
-api.add_resource(DraftWorkflowImportApi, "/apps/<uuid:app_id>/workflows/draft/import")
+api.add_resource(WorkflowConfigApi, "/apps/<uuid:app_id>/workflows/draft/config")
 api.add_resource(AdvancedChatDraftWorkflowRunApi, "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
 api.add_resource(DraftWorkflowRunApi, "/apps/<uuid:app_id>/workflows/draft/run")
 api.add_resource(WorkflowTaskStopApi, "/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
@@ -466,6 +479,7 @@ api.add_resource(
     WorkflowDraftRunIterationNodeApi, "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run"
 )
 api.add_resource(PublishedWorkflowApi, "/apps/<uuid:app_id>/workflows/publish")
+api.add_resource(PublishedAllWorkflowApi, "/apps/<uuid:app_id>/workflows")
 api.add_resource(DefaultBlockConfigsApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
 api.add_resource(
     DefaultBlockConfigApi, "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>"

+ 2 - 2
api/controllers/console/app/workflow_app_log.py

@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse
-from flask_restful.inputs import int_range
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_restful.inputs import int_range  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

+ 2 - 2
api/controllers/console/app/workflow_run.py

@@ -1,5 +1,5 @@
-from flask_restful import Resource, marshal_with, reqparse
-from flask_restful.inputs import int_range
+from flask_restful import Resource, marshal_with, reqparse  # type: ignore
+from flask_restful.inputs import int_range  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

+ 2 - 2
api/controllers/console/app/workflow_statistic.py

@@ -3,8 +3,8 @@ from decimal import Decimal
 
 import pytz
 from flask import jsonify
-from flask_login import current_user
-from flask_restful import Resource, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
 
 from controllers.console import api
 from controllers.console.app.wraps import get_app_model

+ 2 - 3
api/controllers/console/app/wraps.py

@@ -5,11 +5,10 @@ from typing import Optional, Union
 from controllers.console.app.error import AppNotFoundError
 from extensions.ext_database import db
 from libs.login import current_user
-from models import App
-from models.model import AppMode
+from models import App, AppMode
 
 
-def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode]] = None):
+def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
     def decorator(view_func):
         @wraps(view_func)
         def decorated_view(*args, **kwargs):

+ 4 - 4
api/controllers/console/auth/activate.py

@@ -1,14 +1,14 @@
 import datetime
 
 from flask import request
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 
 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.error import AlreadyActivateError
 from extensions.ext_database import db
 from libs.helper import StrLen, email, extract_remote_ip, timezone
-from models.account import AccountStatus, Tenant
+from models.account import AccountStatus
 from services.account_service import AccountService, RegisterService
 
 
@@ -27,7 +27,7 @@ class ActivateCheckApi(Resource):
         invitation = RegisterService.get_invitation_if_token_valid(workspaceId, reg_email, token)
         if invitation:
             data = invitation.get("data", {})
-            tenant: Tenant = invitation.get("tenant", None)
+            tenant = invitation.get("tenant", None)
             workspace_name = tenant.name if tenant else None
             workspace_id = tenant.id if tenant else None
             invitee_email = data.get("email") if data else None
@@ -65,7 +65,7 @@ class ActivateApi(Resource):
         account.timezone = args["timezone"]
         account.interface_theme = "light"
         account.status = AccountStatus.ACTIVE.value
-        account.initialized_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.commit()
 
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

+ 2 - 2
api/controllers/console/auth/data_source_bearer_auth.py

@@ -1,5 +1,5 @@
-from flask_login import current_user
-from flask_restful import Resource, reqparse
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
 from werkzeug.exceptions import Forbidden
 
 from controllers.console import api

+ 4 - 5
api/controllers/console/auth/data_source_oauth.py

@@ -2,8 +2,8 @@ import logging
 
 import requests
 from flask import current_app, redirect, request
-from flask_login import current_user
-from flask_restful import Resource
+from flask_login import current_user  # type: ignore
+from flask_restful import Resource  # type: ignore
 from werkzeug.exceptions import Forbidden
 
 from configs import dify_config
@@ -17,8 +17,8 @@ from ..wraps import account_initialization_required, setup_required
 def get_oauth_providers():
     with current_app.app_context():
         notion_oauth = NotionOAuth(
-            client_id=dify_config.NOTION_CLIENT_ID,
-            client_secret=dify_config.NOTION_CLIENT_SECRET,
+            client_id=dify_config.NOTION_CLIENT_ID or "",
+            client_secret=dify_config.NOTION_CLIENT_SECRET or "",
             redirect_uri=dify_config.CONSOLE_API_URL + "/console/api/oauth/data-source/callback/notion",
         )
 
@@ -34,7 +34,6 @@ class OAuthDataSource(Resource):
         OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
         with current_app.app_context():
             oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
-            print(vars(oauth_provider))
         if not oauth_provider:
             return {"error": "Invalid provider"}, 400
         if dify_config.NOTION_INTEGRATION_TYPE == "internal":

+ 12 - 0
api/controllers/console/auth/error.py

@@ -53,3 +53,15 @@ class EmailCodeLoginRateLimitExceededError(BaseHTTPException):
     error_code = "email_code_login_rate_limit_exceeded"
     description = "Too many login emails have been sent. Please try again in 5 minutes."
     code = 429
+
+
+class EmailCodeAccountDeletionRateLimitExceededError(BaseHTTPException):
+    error_code = "email_code_account_deletion_rate_limit_exceeded"
+    description = "Too many account deletion emails have been sent. Please try again in 5 minutes."
+    code = 429
+
+
+class EmailPasswordResetLimitError(BaseHTTPException):
+    error_code = "email_password_reset_limit"
+    description = "Too many failed password reset attempts. Please try again in 24 hours."
+    code = 429

+ 15 - 5
api/controllers/console/auth/forgot_password.py

@@ -2,17 +2,18 @@ import base64
 import secrets
 
 from flask import request
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
     EmailCodeError,
+    EmailPasswordResetLimitError,
     InvalidEmailError,
     InvalidTokenError,
     PasswordMismatchError,
 )
-from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister
+from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
 from controllers.console.wraps import setup_required
 from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
@@ -20,6 +21,7 @@ from libs.helper import email, extract_remote_ip
 from libs.password import hash_password, valid_password
 from models.account import Account
 from services.account_service import AccountService, TenantService
+from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 
@@ -48,7 +50,7 @@ class ForgotPasswordSendEmailApi(Resource):
                 token = AccountService.send_reset_password_email(email=args["email"], language=language)
                 return {"result": "fail", "data": token, "code": "account_not_found"}
             else:
-                raise NotAllowedRegister()
+                raise AccountNotFound()
         else:
             token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language)
 
@@ -66,6 +68,10 @@ class ForgotPasswordCheckApi(Resource):
 
         user_email = args["email"]
 
+        is_forgot_password_error_rate_limit = AccountService.is_forgot_password_error_rate_limit(args["email"])
+        if is_forgot_password_error_rate_limit:
+            raise EmailPasswordResetLimitError()
+
         token_data = AccountService.get_reset_password_data(args["token"])
         if token_data is None:
             raise InvalidTokenError()
@@ -74,8 +80,10 @@ class ForgotPasswordCheckApi(Resource):
             raise InvalidEmailError()
 
         if args["code"] != token_data.get("code"):
+            AccountService.add_forgot_password_error_rate_limit(args["email"])
             raise EmailCodeError()
 
+        AccountService.reset_forgot_password_error_rate_limit(args["email"])
         return {"is_valid": True, "email": token_data.get("email")}
 
 
@@ -122,13 +130,15 @@ class ForgotPasswordResetApi(Resource):
         else:
             try:
                 account = AccountService.create_account_and_tenant(
-                    email=reset_data.get("email"),
-                    name=reset_data.get("email"),
+                    email=reset_data.get("email", ""),
+                    name=reset_data.get("email", ""),
                     password=password_confirm,
                     interface_language=languages[0],
                 )
             except WorkSpaceNotAllowedCreateError:
                 pass
+            except AccountRegisterError as are:
+                raise AccountInFreezeError()
 
         return {"result": "success"}
 

+ 27 - 10
api/controllers/console/auth/login.py

@@ -1,10 +1,11 @@
 from typing import cast
 
-import flask_login
+import flask_login  # type: ignore
 from flask import request
-from flask_restful import Resource, reqparse
+from flask_restful import Resource, reqparse  # type: ignore
 
 import services
+from configs import dify_config
 from constants.languages import languages
 from controllers.console import api
 from controllers.console.auth.error import (
@@ -16,9 +17,10 @@ from controllers.console.auth.error import (
 )
 from controllers.console.error import (
     AccountBannedError,
+    AccountInFreezeError,
+    AccountNotFound,
     EmailSendIpLimitError,
     NotAllowedCreateWorkspace,
-    NotAllowedRegister,
 )
 from controllers.console.wraps import setup_required
 from events.tenant_event import tenant_was_created
@@ -26,6 +28,8 @@ from libs.helper import email, extract_remote_ip
 from libs.password import valid_password
 from models.account import Account
 from services.account_service import AccountService, RegisterService, TenantService
+from services.billing_service import BillingService
+from services.errors.account import AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError
 from services.feature_service import FeatureService
 
@@ -44,6 +48,9 @@ class LoginApi(Resource):
         parser.add_argument("language", type=str, required=False, default="en-US", location="json")
         args = parser.parse_args()
 
+        if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(args["email"]):
+            raise AccountInFreezeError()
+
         is_login_error_rate_limit = AccountService.is_login_error_rate_limit(args["email"])
         if is_login_error_rate_limit:
             raise EmailPasswordLoginLimitError()
@@ -76,7 +83,7 @@ class LoginApi(Resource):
                 token = AccountService.send_reset_password_email(email=args["email"], language=language)
                 return {"result": "fail", "data": token, "code": "account_not_found"}
             else:
-                raise NotAllowedRegister()
+                raise AccountNotFound()
         # SELF_HOSTED only have one workspace
         tenants = TenantService.get_join_tenants(account)
         if len(tenants) == 0:
@@ -113,13 +120,15 @@ class ResetPasswordSendEmailApi(Resource):
             language = "zh-Hans"
         else:
             language = "en-US"
-
-        account = AccountService.get_user_through_email(args["email"])
+        try:
+            account = AccountService.get_user_through_email(args["email"])
+        except AccountRegisterError as are:
+            raise AccountInFreezeError()
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
                 token = AccountService.send_reset_password_email(email=args["email"], language=language)
             else:
-                raise NotAllowedRegister()
+                raise AccountNotFound()
         else:
             token = AccountService.send_reset_password_email(account=account, language=language)
 
@@ -142,13 +151,16 @@ class EmailCodeLoginSendEmailApi(Resource):
             language = "zh-Hans"
         else:
             language = "en-US"
+        try:
+            account = AccountService.get_user_through_email(args["email"])
+        except AccountRegisterError as are:
+            raise AccountInFreezeError()
 
-        account = AccountService.get_user_through_email(args["email"])
         if account is None:
             if FeatureService.get_system_features().is_allow_register:
                 token = AccountService.send_email_code_login_email(email=args["email"], language=language)
             else:
-                raise NotAllowedRegister()
+                raise AccountNotFound()
         else:
             token = AccountService.send_email_code_login_email(account=account, language=language)
 
@@ -177,7 +189,10 @@ class EmailCodeLoginApi(Resource):
             raise EmailCodeError()
 
         AccountService.revoke_email_code_login_token(args["token"])
-        account = AccountService.get_user_through_email(user_email)
+        try:
+            account = AccountService.get_user_through_email(user_email)
+        except AccountRegisterError as are:
+            raise AccountInFreezeError()
         if account:
             tenant = TenantService.get_join_tenants(account)
             if not tenant:
@@ -196,6 +211,8 @@ class EmailCodeLoginApi(Resource):
                 )
             except WorkSpaceNotAllowedCreateError:
                 return NotAllowedCreateWorkspace()
+            except AccountRegisterError as are:
+                raise AccountInFreezeError()
         token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
         AccountService.reset_login_error_rate_limit(args["email"])
         return {"result": "success", "data": token_pair.model_dump()}

+ 11 - 9
api/controllers/console/auth/oauth.py

@@ -1,10 +1,10 @@
 import logging
-from datetime import datetime, timezone
-from typing import Optional, Dict
+from datetime import UTC, datetime
+from typing import Optional
 
 import requests
 from flask import current_app, redirect, request
-from flask_restful import Resource, marshal, fields
+from flask_restful import Resource  # type: ignore
 from werkzeug.exceptions import Unauthorized
 
 from flask_restful import Resource, reqparse
@@ -18,7 +18,7 @@ from libs.login import admin_api_key_required
 from models import Account
 from models.account import AccountStatus
 from services.account_service import AccountService, RegisterService, TenantService
-from services.errors.account import AccountNotFoundError
+from services.errors.account import AccountNotFoundError, AccountRegisterError
 from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
 from services.feature_service import FeatureService
 
@@ -54,7 +54,6 @@ class OAuthLogin(Resource):
         OAUTH_PROVIDERS = get_oauth_providers()
         with current_app.app_context():
             oauth_provider = OAUTH_PROVIDERS.get(provider)
-            print(vars(oauth_provider))
         if not oauth_provider:
             return {"error": "Invalid provider"}, 400
 
@@ -79,8 +78,9 @@ class OAuthCallback(Resource):
         try:
             token = oauth_provider.get_access_token(code)
             user_info = oauth_provider.get_user_info(token)
-        except requests.exceptions.HTTPError as e:
-            logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
+        except requests.exceptions.RequestException as e:
+            error_text = e.response.text if e.response else str(e)
+            logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}")
             return {"error": "OAuth process failed"}, 400
 
         if invite_token and RegisterService.is_valid_invite_token(invite_token):
@@ -101,6 +101,8 @@ class OAuthCallback(Resource):
                 f"{dify_config.CONSOLE_WEB_URL}/signin"
                 "?message=Workspace not found, please contact system admin to invite you to join in a workspace."
             )
+        except AccountRegisterError as e:
+            return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={e.description}")
 
         # Check account status
         if account.status == AccountStatus.BANNED.value:
@@ -108,7 +110,7 @@ class OAuthCallback(Resource):
 
         if account.status == AccountStatus.PENDING.value:
             account.status = AccountStatus.ACTIVE.value
-            account.initialized_at = datetime.now(timezone.utc).replace(tzinfo=None)
+            account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
             db.session.commit()
 
         try:
@@ -146,7 +148,7 @@ class RegisterApi(Resource):
 
 
 def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
-    account = Account.get_by_openid(provider, user_info.id)
+    account: Optional[Account] = Account.get_by_openid(provider, user_info.id)
 
     if not account:
         account = Account.query.filter_by(email=user_info.email).first()

Alguns arquivos não foram mostrados porque muitos arquivos mudaram nesse diff