Compare commits

...

2 Commits

Author SHA1 Message Date
jiangxucong c609367e9e Merge remote-tracking branch 'origin/main' into main
# Conflicts:
#	DigitalHumanWeb/Dockerfile
2 months ago
jiangxucong 2c5227b509 Configuration file changes (配置文件修改) 2 months ago

@@ -0,0 +1 @@
module.exports = require('@lobehub/lint').changelog;

@@ -0,0 +1 @@
module.exports = require('@lobehub/lint').commitlint;

@@ -0,0 +1,6 @@
{
"image": "mcr.microsoft.com/devcontainers/universal:2",
"features": {
"ghcr.io/devcontainers/features/node:1": {}
}
}

@@ -6,7 +6,7 @@
 ########################################
-######## Model Provider Service ########
+########## AI Provider Service #########
 ########################################
 ### OpenAI ###
@@ -33,7 +33,7 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 # AZURE_ENDPOINT=https://docs-test-001.openai.azure.com
 # Azure's API version, follows the YYYY-MM-DD format
-# AZURE_API_VERSION=2024-06-01
+# AZURE_API_VERSION=2024-10-21
 ### Anthropic Service ####
@@ -106,16 +106,31 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 ### DeepSeek AI ####
+# DEEPSEEK_PROXY_URL=https://api.deepseek.com/v1
 # DEEPSEEK_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 ### Qwen AI ####
 # QWEN_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+### Cloudflare Workers AI ####
+# CLOUDFLARE_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# CLOUDFLARE_BASE_URL_OR_ACCOUNT_ID=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 ### SiliconCloud AI ####
 # SILICONCLOUD_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+### TencentCloud AI ####
+# TENCENT_CLOUD_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+### PPIO ####
+# PPIO_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 ########################################
 ############ Market Service ############
 ########################################
@@ -134,20 +149,31 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 # the format is `plugin-identifier:key1=value1;key2=value2`, multiple settings fields are separated by semicolons `;`, multiple plugin settings are separated by commas `,`.
 # PLUGIN_SETTINGS=search-engine:SERPAPI_API_KEY=xxxxx
+########################################
+####### Doc / Changelog Service ########
+########################################
+# Use in Changelog / Document service cdn url prefix
+# DOC_S3_PUBLIC_DOMAIN=https://xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# Use in dev cdn workflow
+# DOC_S3_ACCESS_KEY_ID=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# DOC_S3_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 ########################################
 ##### S3 Object Storage Service ########
 ########################################
 # S3 keys
-#S3_ACCESS_KEY_ID=9998d6757e276cf9f1edbd325b7083a6
-#S3_SECRET_ACCESS_KEY=55af75d8eb6b99f189f6a35f855336ea62cd9c4751a5cf4337c53c1d3f497ac2
+# S3_ACCESS_KEY_ID=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# S3_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 # Bucket name
 # S3_BUCKET=lobechat
 # Bucket request endpoint
-#S3_ENDPOINT=https://0b33a03b5c993fd2f453379dc36558e5.r2.cloudflarestorage.com
+# S3_ENDPOINT=https://xxxxxxxxxxxxxxxxxxxxxxxxxxxxx.r2.cloudflarestorage.com
 # Public access domain for the bucket
 # S3_PUBLIC_DOMAIN=https://s3-for-lobechat.your-domain.com
@@ -173,12 +199,13 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 # NextAuth related configurations
+# NEXT_PUBLIC_ENABLE_NEXT_AUTH=1
 # NEXT_AUTH_SECRET=
 # Auth0 configurations
-# AUTH0_CLIENT_ID=
-# AUTH0_CLIENT_SECRET=
-# AUTH0_ISSUER=https://your-domain.auth0.com
+# AUTH_AUTH0_ID=
+# AUTH_AUTH0_SECRET=
+# AUTH_AUTH0_ISSUER=https://your-domain.auth0.com
 ########################################
 ########## Server Database #############
@@ -191,5 +218,8 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 # DATABASE_URL=postgres://username:password@host:port/database
 # use `openssl rand -base64 32` to generate a key for the encryption of the database
-# we use this key to encrypt the user api key
+# we use this key to encrypt the user api key and proxy url
 #KEY_VAULTS_SECRET=xxxxx/xxxxxxxxxxxxxx=
+# Specify the Embedding model and Reranker model(unImplemented)
+# DEFAULT_FILES_CONFIG="embedding_model=openai/embedding-text-3-small,reranker_model=cohere/rerank-english-v3.0,query_mode=full_text"
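As a worked illustration of the `PLUGIN_SETTINGS` format documented above (settings fields separated by `;`, plugins separated by `,`), a value configuring two plugins might look like `PLUGIN_SETTINGS=search-engine:SERPAPI_API_KEY=xxxxx;TIMEOUT=30,web-reader:API_KEY=yyyyy`; the second plugin identifier and the extra fields here are placeholders, not real plugin settings. Similarly, the `KEY_VAULTS_SECRET` comment suggests `openssl rand -base64 32`, which prints a 44-character base64 string that can be pasted in directly as the value.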

@@ -0,0 +1,31 @@
# Eslintignore for LobeHub
################################################################
# dependencies
node_modules
# ci
coverage
.coverage
# test
jest*
*.test.ts
*.test.tsx
# umi
.umi
.umi-production
.umi-test
.dumi/tmp*
!.dumirc.ts
# production
dist
es
lib
logs
# misc
# add other ignore file below
.next

@@ -0,0 +1,37 @@
const config = require('@lobehub/lint').eslint;
config.extends.push('plugin:@next/next/recommended');
config.rules['unicorn/no-negated-condition'] = 0;
config.rules['unicorn/prefer-type-error'] = 0;
config.rules['unicorn/prefer-logical-operator-over-ternary'] = 0;
config.rules['unicorn/no-null'] = 0;
config.rules['unicorn/no-typeof-undefined'] = 0;
config.rules['unicorn/explicit-length-check'] = 0;
config.rules['unicorn/prefer-code-point'] = 0;
config.rules['no-extra-boolean-cast'] = 0;
config.rules['unicorn/no-useless-undefined'] = 0;
config.rules['react/no-unknown-property'] = 0;
config.rules['unicorn/prefer-ternary'] = 0;
config.rules['unicorn/prefer-spread'] = 0;
config.rules['unicorn/catch-error-name'] = 0;
config.rules['unicorn/no-array-for-each'] = 0;
config.rules['unicorn/prefer-number-properties'] = 0;
config.overrides = [
{
extends: ['plugin:mdx/recommended'],
files: ['*.mdx'],
rules: {
'@typescript-eslint/no-unused-vars': 1,
'no-undef': 0,
'react/jsx-no-undef': 0,
'react/no-unescaped-entities': 0,
},
settings: {
'mdx/code-blocks': false,
},
},
];
module.exports = config;

@@ -0,0 +1,13 @@
# These are supported funding model platforms
github: lobehub
patreon: # Replace with a single Patreon username
open_collective: lobehub
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: lobehub/lobe-chat
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

@@ -0,0 +1,79 @@
name: '🐛 Bug Report'
description: 'Report a bug'
title: '[Bug] '
labels: ['🐛 Bug']
body:
- type: dropdown
attributes:
label: '📦 Platform'
multiple: true
options:
- 'Official Preview'
- 'Vercel'
- 'Zeabur'
- 'Sealos'
- 'Netlify'
- 'Self hosting Docker'
- 'Other'
validations:
required: true
- type: dropdown
attributes:
label: '📦 Deployment mode'
multiple: true
options:
- 'client db (lobe-chat image)'
- 'client pglite db (lobe-chat-pglite image)'
- 'server db (lobe-chat-database image)'
validations:
required: true
- type: input
attributes:
label: '📌 Version'
validations:
required: true
- type: dropdown
attributes:
label: '💻 Operating System'
multiple: true
options:
- 'Windows'
- 'macOS'
- 'Ubuntu'
- 'Other Linux'
- 'iOS'
- 'Android'
- 'Other'
validations:
required: true
- type: dropdown
attributes:
label: '🌐 Browser'
multiple: true
options:
- 'Chrome'
- 'Edge'
- 'Safari'
- 'Firefox'
- 'Other'
validations:
required: true
- type: textarea
attributes:
label: '🐛 Bug Description'
description: A clear and concise description of the bug; if the above option is `Other`, please also explain in detail.
validations:
required: true
- type: textarea
attributes:
label: '📷 Reproduction Steps'
description: A clear and concise description of how to reproduce the issue.
- type: textarea
attributes:
label: '🚦 Expected Behavior'
description: A clear and concise description of what you expected to happen.
- type: textarea
attributes:
label: '📝 Additional Information'
description: If your problem needs further explanation, or if the issue you're seeing cannot be reproduced in a gist, please add more information here.

@@ -0,0 +1,87 @@
name: '🐛 反馈缺陷'
description: '反馈一个问题缺陷'
title: '[Bug] '
labels: ['🐛 Bug']
type: Bug
body:
- type: markdown
attributes:
value: |
在创建新的 Issue 之前,请先[搜索已有问题](https://github.com/lobehub/lobe-chat/issues),如果发现已有类似的问题,请给它 **👍 点赞**,这样可以帮助我们更快地解决问题。
如果你在使用过程中遇到问题,可以尝试以下方式获取帮助:
- 在 [GitHub Discussions](https://github.com/lobehub/lobe-chat/discussions) 的版块发起讨论。
- 在 [LobeChat 社区](https://discord.gg/AYFPHvv2jT) 提问,与其他用户交流。
- type: dropdown
attributes:
label: '📦 部署环境'
multiple: true
options:
- 'Official Preview'
- 'Vercel'
- 'Zeabur'
- 'Sealos'
- 'Netlify'
- 'Docker'
- 'Other'
validations:
required: true
- type: dropdown
attributes:
label: '📦 部署模式'
multiple: true
options:
- '客户端模式(lobe-chat 镜像)'
- '客户端 Pglite 模式(lobe-chat-pglite 镜像)'
- '服务端模式(lobe-chat-database 镜像)'
validations:
required: true
- type: input
attributes:
label: '📌 软件版本'
validations:
required: true
- type: dropdown
attributes:
label: '💻 系统环境'
multiple: true
options:
- 'Windows'
- 'macOS'
- 'Ubuntu'
- 'Other Linux'
- 'iOS'
- 'Android'
- 'Other'
validations:
required: true
- type: dropdown
attributes:
label: '🌐 浏览器'
multiple: true
options:
- 'Chrome'
- 'Edge'
- 'Safari'
- 'Firefox'
- 'Other'
validations:
required: true
- type: textarea
attributes:
label: '🐛 问题描述'
description: 请提供一个清晰且简洁的问题描述,若上述选项为`Other`,也请详细说明。
validations:
required: true
- type: textarea
attributes:
label: '📷 复现步骤'
description: 请提供一个清晰且简洁的描述,说明如何复现问题。
- type: textarea
attributes:
label: '🚦 期望结果'
description: 请提供一个清晰且简洁的描述,说明您期望发生什么。
- type: textarea
attributes:
label: '📝 补充信息'
description: 如果您的问题需要进一步说明,或者您遇到的问题无法在一个简单的示例中复现,请在这里添加更多信息。

@@ -0,0 +1,21 @@
name: '🌠 Feature Request'
description: 'Suggest an idea'
title: '[Request] '
labels: ['🌠 Feature Request']
body:
- type: textarea
attributes:
label: '🥰 Feature Description'
description: Please add a clear and concise description of the problem you are seeking to solve with this feature request.
validations:
required: true
- type: textarea
attributes:
label: '🧐 Proposed Solution'
description: Describe the solution you'd like in a clear and concise manner.
validations:
required: true
- type: textarea
attributes:
label: '📝 Additional Information'
description: Add any other context about the problem here.

@@ -0,0 +1,21 @@
name: '🌠 功能需求'
description: '提出需求或建议'
title: '[Request] '
labels: ['🌠 Feature Request']
body:
- type: textarea
attributes:
label: '🥰 需求描述'
description: 请添加一个清晰且简洁的问题描述,阐述您希望通过这个功能需求解决的问题。
validations:
required: true
- type: textarea
attributes:
label: '🧐 解决方案'
description: 请清晰且简洁地描述您想要的解决方案。
validations:
required: true
- type: textarea
attributes:
label: '📝 补充信息'
description: 在这里添加关于问题的任何其他背景信息。

@@ -0,0 +1,7 @@
contact_links:
- name: Ask a question for self-hosting | 咨询自部署问题
url: https://github.com/lobehub/lobe-chat/discussions/new?category=self-hosting-%E7%A7%81%E6%9C%89%E5%8C%96%E9%83%A8%E7%BD%B2
about: Please post questions, and ideas in discussions. | 请在讨论区发布问题和想法。
- name: Questions and ideas | 其他问题和想法
url: https://github.com/lobehub/lobe-chat/discussions/new/choose
about: Please post questions, and ideas in discussions. | 请在讨论区发布问题和想法。

@@ -0,0 +1,20 @@
#### 💻 变更类型 | Change Type
<!-- For change type, change [ ] to [x]. -->
- [ ] ✨ feat
- [ ] 🐛 fix
- [ ] ♻️ refactor
- [ ] 💄 style
- [ ] 👷 build
- [ ] ⚡️ perf
- [ ] 📝 docs
- [ ] 🔨 chore
#### 🔀 变更说明 | Description of Change
<!-- Thank you for your Pull Request. Please provide a description above. -->
#### 📝 补充信息 | Additional Information
<!-- Add any other context about the Pull Request here. -->

@@ -0,0 +1,161 @@
name: Publish Database Docker Image
on:
workflow_dispatch:
release:
types: [published]
pull_request:
types: [synchronize, labeled, unlabeled]
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
REGISTRY_IMAGE: lobehub/lobe-chat-database
PR_TAG_PREFIX: pr-
jobs:
build:
# Add PR label as a trigger condition
if: |
(github.event_name == 'pull_request' &&
contains(github.event.pull_request.labels.*.name, 'Build Docker')) ||
github.event_name != 'pull_request'
strategy:
matrix:
include:
- platform: linux/amd64
os: ubuntu-latest
- platform: linux/arm64
os: ubuntu-24.04-arm
runs-on: ${{ matrix.os }}
name: Build ${{ matrix.platform }} Image
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate a dedicated tag for PR builds
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
# PR builds use a dedicated tag
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
# Release builds use the version number
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Get commit SHA
if: github.ref == 'refs/heads/main'
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
uses: docker/build-push-action@v5
with:
platforms: ${{ matrix.platform }}
context: .
file: ./Dockerfile.database
labels: ${{ steps.meta.outputs.labels }}
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
rm -rf /tmp/digests
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: digest-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
name: Merge
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digest-*
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate PR metadata for the merge job as well
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
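To make the PR tagging concrete (hypothetical branch and commit): a pull request from branch `feat/new-provider` built at short SHA `abc1234` produces `pr_tag=feat-new-provider-abc1234`, since the sed expression replaces every character outside `[a-zA-Z0-9_.-]` with `-`; combined with `PR_TAG_PREFIX`, the resulting image would be pulled as `lobehub/lobe-chat-database:pr-feat-new-provider-abc1234`.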

@@ -0,0 +1,161 @@
name: Publish Docker Pglite Image
on:
workflow_dispatch:
release:
types: [published]
pull_request:
types: [synchronize, labeled, unlabeled]
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
REGISTRY_IMAGE: lobehub/lobe-chat-pglite
PR_TAG_PREFIX: pr-
jobs:
build:
# Add PR label as a trigger condition
if: |
(github.event_name == 'pull_request' &&
contains(github.event.pull_request.labels.*.name, 'Build Docker')) ||
github.event_name != 'pull_request'
strategy:
matrix:
include:
- platform: linux/amd64
os: ubuntu-latest
- platform: linux/arm64
os: ubuntu-24.04-arm
runs-on: ${{ matrix.os }}
name: Build ${{ matrix.platform }} Image
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate a dedicated tag for PR builds
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
# PR builds use a dedicated tag
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
# Release builds use the version number
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Get commit SHA
if: github.ref == 'refs/heads/main'
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
uses: docker/build-push-action@v5
with:
platforms: ${{ matrix.platform }}
context: .
file: ./Dockerfile.pglite
labels: ${{ steps.meta.outputs.labels }}
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
rm -rf /tmp/digests
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: digest-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
name: Merge
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digest-*
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate PR metadata for the merge job as well
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}

@@ -0,0 +1,161 @@
name: Publish Docker Image
on:
workflow_dispatch:
release:
types: [published]
pull_request:
types: [synchronize, labeled, unlabeled]
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
REGISTRY_IMAGE: lobehub/lobe-chat
PR_TAG_PREFIX: pr-
jobs:
build:
# Add PR label as a trigger condition
if: |
(github.event_name == 'pull_request' &&
contains(github.event.pull_request.labels.*.name, 'Build Docker')) ||
github.event_name != 'pull_request'
strategy:
matrix:
include:
- platform: linux/amd64
os: ubuntu-latest
- platform: linux/arm64
os: ubuntu-24.04-arm
runs-on: ${{ matrix.os }}
name: Build ${{ matrix.platform }} Image
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate a dedicated tag for PR builds
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
# PR builds use a dedicated tag
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
# Release builds use the version number
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Get commit SHA
if: github.ref == 'refs/heads/main'
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
uses: docker/build-push-action@v5
with:
platforms: ${{ matrix.platform }}
context: .
file: ./Dockerfile
labels: ${{ steps.meta.outputs.labels }}
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
rm -rf /tmp/digests
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: digest-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
name: Merge
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout base
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digest-*
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# Generate PR metadata for the merge job as well
- name: Generate PR metadata
if: github.event_name == 'pull_request'
id: pr_meta
run: |
branch_name="${{ github.head_ref }}"
sanitized_branch=$(echo "${branch_name}" | sed -E 's/[^a-zA-Z0-9_.-]+/-/g')
echo "pr_tag=${sanitized_branch}-$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
type=raw,value=${{ env.PR_TAG_PREFIX }}${{ steps.pr_meta.outputs.pr_tag }},enable=${{ github.event_name == 'pull_request' }}
type=semver,pattern={{version}},enable=${{ github.event_name != 'pull_request' }}
type=raw,value=latest,enable=${{ github.event_name != 'pull_request' }}
- name: Docker login
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_REGISTRY_USER }}
password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}

@@ -0,0 +1,73 @@
name: Issue Auto Comment
on:
issues:
types:
- opened
- closed
- assigned
pull_request_target:
types:
- opened
- closed
permissions:
contents: read
jobs:
run:
permissions:
issues: write # for actions-cool/issues-helper to update issues
pull-requests: write # for actions-cool/issues-helper to update PRs
runs-on: ubuntu-latest
steps:
- name: Auto Comment on Issues Opened
uses: wow-actions/auto-comment@v1
with:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN}}
issuesOpened: |
👀 @{{ author }}
Thank you for raising an issue. We will investigate the matter and get back to you as soon as possible.
Please make sure you have given us as much context as possible.\
非常感谢您提交 issue。我们会尽快调查此事并尽快回复您。 请确保您已经提供了尽可能多的背景信息。
- name: Auto Comment on Issues Closed
uses: wow-actions/auto-comment@v1
with:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN}}
issuesClosed: |
✅ @{{ author }}
This issue is closed. If you have any questions, you can comment and reply.\
此问题已经关闭。如果您有任何问题,可以留言并回复。
- name: Auto Comment on Pull Request Opened
uses: wow-actions/auto-comment@v1
with:
GITHUB_TOKEN: ${{ secrets.GH_TOKEN}}
pullRequestOpened: |
👍 @{{ author }}
Thank you for raising your pull request and contributing to our community.
Please make sure you have followed our contributing guidelines. We will review it as soon as possible.
If you encounter any problems, please feel free to connect with us.\
非常感谢您提出拉取请求并为我们的社区做出贡献,请确保您已经遵循了我们的贡献指南,我们会尽快审查它。
如果您遇到任何问题,请随时与我们联系。
- name: Auto Comment on Pull Request Merged
uses: actions-cool/pr-welcome@main
if: github.event.pull_request.merged == true
with:
token: ${{ secrets.GH_TOKEN }}
comment: |
❤️ Great PR @${{ github.event.pull_request.user.login }} ❤️
The growth of the project is inseparable from user feedback and contributions, thanks for your contribution! If you are interested in the lobehub developer community, please join our [discord](https://discord.com/invite/AYFPHvv2jT) and then DM @arvinxx or @canisminor1990. They will invite you to our private developer channel, where we discuss lobe-chat development and share AI news from around the world.\
项目的成长离不开用户反馈和贡献,感谢您的贡献! 如果您对 LobeHub 开发者社区感兴趣,请加入我们的 [discord](https://discord.com/invite/AYFPHvv2jT),然后私信 @arvinxx 或 @canisminor1990。他们会邀请您加入我们的私密开发者频道。我们将会讨论关于 Lobe Chat 的开发,分享和讨论全球范围内的 AI 消息。
emoji: 'hooray'
pr-emoji: '+1, heart'
- name: Remove inactive
if: github.event.issue.state == 'open' && github.actor == github.event.issue.user.login
uses: actions-cool/issues-helper@v3
with:
actions: 'remove-labels'
token: ${{ secrets.GH_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: 'Inactive'

@@ -0,0 +1,66 @@
name: Issue Close Require
on:
schedule:
- cron: '0 0 * * *'
permissions:
contents: read
jobs:
issue-check-inactive:
permissions:
issues: write # for actions-cool/issues-helper to update issues
pull-requests: write # for actions-cool/issues-helper to update PRs
runs-on: ubuntu-latest
steps:
- name: check-inactive
uses: actions-cool/issues-helper@v3
with:
actions: 'check-inactive'
token: ${{ secrets.GH_TOKEN }}
inactive-label: 'Inactive'
inactive-day: 60
issue-close-require:
permissions:
issues: write # for actions-cool/issues-helper to update issues
pull-requests: write # for actions-cool/issues-helper to update PRs
runs-on: ubuntu-latest
steps:
- name: need reproduce
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GH_TOKEN }}
labels: '✅ Fixed'
inactive-day: 3
body: |
👋 @{{ author }}
<br/>
Since the issue was labeled with `✅ Fixed` but there has been no response in 3 days, it will now be closed. If you have any questions, you can comment and reply.\
由于该 issue 被标记为已修复,同时 3 天未收到回应。现关闭 issue,若有任何问题,可评论回复。
- name: need reproduce
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GH_TOKEN }}
labels: '🤔 Need Reproduce'
inactive-day: 3
body: |
👋 @{{ author }}
<br/>
Since the issue was labeled with `🤔 Need Reproduce` but there has been no response in 3 days, it will now be closed. If you have any questions, you can comment and reply.\
由于该 issue 被标记为需要更多信息,却 3 天未收到回应。现关闭 issue,若有任何问题,可评论回复。
- name: need reproduce
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GH_TOKEN }}
labels: "🙅🏻‍♀️ WON'T DO"
inactive-day: 3
body: |
👋 @{{ github.event.issue.user.login }}
<br/>
Since the issue was labeled with `🙅🏻‍♀️ WON'T DO` and there has been no response in 3 days, it will now be closed. If you have any questions, you can comment and reply.\
由于该 issue 被标记为暂不处理,同时 3 天未收到回应。现关闭 issue,若有任何问题,可评论回复。

@@ -0,0 +1,14 @@
name: Issue Translate
on:
issue_comment:
types: [created]
issues:
types: [opened]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: usthe/issues-translate-action@v2.7
with:
BOT_GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}

@@ -0,0 +1,64 @@
name: Lighthouse Badger
env:
TOKEN_NAME: 'GH_TOKEN'
REPO_BRANCH: 'lobehub/lobe-chat lighthouse'
USER_NAME: 'lobehubbot'
USER_EMAIL: 'i@lobehub.com'
AUDIT_TYPE: 'both'
MOBILE_LIGHTHOUSE_PARAMS: '--throttling.cpuSlowdownMultiplier=2'
DESKTOP_LIGHTHOUSE_PARAMS: '--preset=desktop --throttling.cpuSlowdownMultiplier=1'
COMMIT_MESSAGE: '🤖 chore: Lighthouse Results Refreshed'
on:
schedule:
- cron: '0 0 * * *' # every day
workflow_dispatch:
jobs:
lighthouse-badger-advanced:
name: ${{ matrix.NAME }}
runs-on: ubuntu-24.04
timeout-minutes: 8
strategy:
fail-fast: false
matrix:
include:
- NAME: 'LobeChat | Chat'
URLS: 'https://lobechat.com/chat'
BADGES_ARGS: '-b pagespeed -o lighthouse/chat -r'
COMMIT_MESSAGE: '🤖 chore: Lighthouse Results | Chat'
- NAME: 'LobeChat | Market'
URLS: 'https://lobechat.com/discover'
BADGES_ARGS: '-b pagespeed -o lighthouse/discover -r'
COMMIT_MESSAGE: '🤖 chore: Lighthouse Results | Discover'
steps:
- name: Preparatory Tasks
run: |
REPOSITORY=`expr "${{ env.REPO_BRANCH }}" : "\([^ ]*\)"`
BRANCH=`expr "${{ env.REPO_BRANCH }}" : ".* \([^ ]*\)"`
echo "REPOSITORY=$REPOSITORY" >> $GITHUB_ENV
echo "BRANCH=$BRANCH" >> $GITHUB_ENV
env:
REPO_BRANCH: ${{ matrix.REPO_BRANCH || env.REPO_BRANCH }}
- uses: actions/checkout@v4
with:
repository: ${{ env.REPOSITORY }}
token: ${{ secrets[matrix.TOKEN_NAME] || secrets[env.TOKEN_NAME] }}
ref: ${{ env.BRANCH }}
- uses: actions/checkout@v4
with:
repository: 'myactionway/lighthouse-badges'
path: temp_lighthouse_badges_nested
- uses: myactionway/lighthouse-badger-action@v2.2
with:
urls: ${{ matrix.URLS }}
badges_args: ${{ matrix.BADGES_ARGS }}
audit_type: ${{ matrix.AUDIT_TYPE || env.AUDIT_TYPE }}
mobile_lighthouse_params: ${{ matrix.MOBILE_LIGHTHOUSE_PARAMS || env.MOBILE_LIGHTHOUSE_PARAMS }}
desktop_lighthouse_params: ${{ matrix.DESKTOP_LIGHTHOUSE_PARAMS || env.DESKTOP_LIGHTHOUSE_PARAMS }}
user_name: ${{ matrix.USER_NAME || env.USER_NAME }}
user_email: ${{ matrix.USER_EMAIL || env.USER_EMAIL }}
commit_message: ${{ matrix.COMMIT_MESSAGE || env.COMMIT_MESSAGE }}
max_push_attempts: 5
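For reference, with the default `REPO_BRANCH` of 'lobehub/lobe-chat lighthouse', the two `expr` matches in the Preparatory Tasks step yield `REPOSITORY=lobehub/lobe-chat` (the token before the space) and `BRANCH=lighthouse` (the token after it), so the generated badges are committed to the `lighthouse` branch of that repository.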

@@ -0,0 +1,67 @@
name: Release CI
on:
push:
branches:
- main
jobs:
release:
name: Release
runs-on: ubuntu-latest
services:
postgres:
image: pgvector/pgvector:pg16
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: ${{ secrets.BUN_VERSION }}
- name: Install deps
run: bun i
- name: Lint
run: bun run lint
- name: Test Server Coverage
run: bun run test-server:coverage
env:
DATABASE_TEST_URL: postgresql://postgres:postgres@localhost:5432/postgres
DATABASE_DRIVER: node
NEXT_PUBLIC_SERVICE_MODE: server
KEY_VAULTS_SECRET: LA7n9k3JdEcbSgml2sxfw+4TV1AzaaFU5+R176aQz4s=
S3_PUBLIC_DOMAIN: https://example.com
APP_URL: https://home.com
- name: Test App Coverage
run: bun run test-app:coverage
- name: Release
run: bun run release
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Workflow
run: bun run workflow:readme
- name: Commit changes
run: |-
git diff
git config --global user.name "lobehubbot"
git config --global user.email "i@lobehub.com"
git add .
git commit -m "📝 docs(bot): Auto sync agents & plugin to readme" || exit 0
git push
env:
GH_TOKEN: ${{ secrets.GH_TOKEN }}

@@ -0,0 +1,54 @@
name: Upstream Sync
permissions:
contents: write
issues: write
actions: write
on:
schedule:
- cron: '0 */6 * * *' # every 6 hours
workflow_dispatch:
jobs:
sync_latest_from_upstream:
name: Sync latest commits from upstream repo
runs-on: ubuntu-latest
if: ${{ github.event.repository.fork }}
steps:
- uses: actions/checkout@v4
- name: Clean issue notice
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
labels: '🚨 Sync Fail'
- name: Sync upstream changes
id: sync
uses: aormsby/Fork-Sync-With-Upstream-action@v3.4
with:
upstream_sync_repo: lobehub/lobe-chat
upstream_sync_branch: main
target_sync_branch: main
target_repo_token: ${{ secrets.GITHUB_TOKEN }} # automatically generated, no need to set
test_mode: false
- name: Sync check
if: failure()
uses: actions-cool/issues-helper@v3
with:
actions: 'create-issue'
title: '🚨 同步失败 | Sync Fail'
labels: '🚨 Sync Fail'
body: |
Due to a change in the workflow file of the [LobeChat][lobechat] upstream repository, GitHub has automatically suspended the scheduled automatic update. You need to manually sync your fork. Please refer to the detailed [Tutorial][tutorial-en-US] for instructions.
由于 [LobeChat][lobechat] 上游仓库的 workflow 文件变更,导致 GitHub 自动暂停了本次自动更新,你需要手动 Sync Fork 一次,请查看 [详细教程][tutorial-zh-CN]
![](https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/273954625-df80c890-0822-4ac2-95e6-c990785cbed5.png)
[lobechat]: https://github.com/lobehub/lobe-chat
[tutorial-zh-CN]: https://github.com/lobehub/lobe-chat/wiki/Upstream-Sync.zh-CN
[tutorial-en-US]: https://github.com/lobehub/lobe-chat/wiki/Upstream-Sync

@@ -0,0 +1,60 @@
name: Test CI
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
services:
postgres:
image: pgvector/pgvector:pg16
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v4
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: ${{ secrets.BUN_VERSION }}
- name: Install deps
run: bun i
- name: Lint
run: bun run lint
- name: Test Server Coverage
run: bun run test-server:coverage
env:
DATABASE_TEST_URL: postgresql://postgres:postgres@localhost:5432/postgres
DATABASE_DRIVER: node
NEXT_PUBLIC_SERVICE_MODE: server
KEY_VAULTS_SECRET: LA7n9k3JdEcbSgml2sxfw+4TV1AzaaFU5+R176aQz4s=
S3_PUBLIC_DOMAIN: https://example.com
APP_URL: https://home.com
- name: Upload Server coverage to Codecov
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage/server/lcov.info
flags: server
- name: Test App Coverage
run: bun run test-app:coverage
- name: Upload App Coverage to Codecov
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: ./coverage/app/lcov.info
flags: app

@@ -0,0 +1,19 @@
name: Wiki Sync
on:
workflow_dispatch:
push:
paths:
- 'contributing/**'
branches:
- main
jobs:
update-wiki:
runs-on: ubuntu-latest
name: Wiki sync
steps:
- uses: OrlovM/Wiki-Action@v1
with:
path: 'contributing'
token: ${{ secrets.GH_TOKEN }}

@@ -67,3 +67,5 @@ public/swe-worker*
 *.patch
 *.pdf
+vertex-ai-key.json
+.pnpm-store

@@ -1 +1,2 @@
npm run type-check
npx --no-install lint-staged

@@ -21,6 +21,7 @@ module.exports = defineConfig({
 'nl-NL',
 'pl-PL',
 'vi-VN',
+'fa-IR',
 ],
 temperature: 0,
 modelName: 'gpt-4o-mini',
@@ -28,11 +29,17 @@ module.exports = defineConfig({
 jsonMode: true,
 },
 markdown: {
-// reference: '你需要保持 mdx 的组件格式,输出文本不需要在最外层包裹任何代码块语法',
+reference: '你需要保持 mdx 的组件格式,输出文本不需要在最外层包裹任何代码块语法',
 entry: ['./README.zh-CN.md', './contributing/**/*.zh-CN.md', './docs/**/*.zh-CN.mdx'],
 entryLocale: 'zh-CN',
 outputLocales: ['en-US'],
-exclude: ['./contributing/_Sidebar.md', './contributing/_Footer.md', './contributing/Home.md'],
+includeMatter: true,
+exclude: [
+'./src/**/*',
+'./contributing/_Sidebar.md',
+'./contributing/_Footer.md',
+'./contributing/Home.md',
+],
 outputExtensions: (locale, { filePath }) => {
 if (filePath.includes('.mdx')) {
 if (locale === 'en-US') return '.mdx';

@@ -1 +1 @@
-lts/iron
+lts/jod

@@ -0,0 +1 @@
module.exports = require('@lobehub/lint').prettier;

@@ -0,0 +1,10 @@
const config = require('@lobehub/lint').semanticRelease;
config.plugins.push([
'@semantic-release/exec',
{
prepareCmd: 'npm run workflow:changelog',
},
]);
module.exports = config;

@@ -0,0 +1 @@
module.exports = require('@lobehub/lint').remarklint;

@@ -0,0 +1,6 @@
const config = require('@lobehub/lint').remarklint;
module.exports = {
...config,
plugins: ['remark-mdx', ...config.plugins],
};

@@ -0,0 +1,9 @@
const config = require('@lobehub/lint').stylelint;
module.exports = {
...config,
rules: {
'selector-id-pattern': null,
...config.rules,
},
};

File diff suppressed because it is too large

@@ -1,5 +1,8 @@
-## Base image for all the stages
-FROM node:22-slim AS base
+## Set global build ENV
+ARG NODEJS_VERSION="22"
+## Base image for all building stages
+FROM node:${NODEJS_VERSION}-slim AS base
 ARG USE_CN_MIRROR
@@ -10,19 +13,22 @@ RUN \
 if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
 sed -i "s/deb.debian.org/mirrors.ustc.edu.cn/g" "/etc/apt/sources.list.d/debian.sources"; \
 fi \
-# Add required package & update base package
+# Add required package
 && apt update \
-&& apt install busybox proxychains-ng -qy \
-&& apt full-upgrade -qy \
-&& apt autoremove -qy --purge \
-&& apt clean -qy \
-# Configure BusyBox
-&& busybox --install -s \
-# Add nextjs:nodejs to run the app
-&& addgroup --system --gid 1001 nodejs \
-&& adduser --system --home "/app" --gid 1001 -uid 1001 nextjs \
-# Set permission for nextjs:nodejs
-&& chown -R nextjs:nodejs "/etc/proxychains4.conf" \
+&& apt install ca-certificates proxychains-ng -qy \
+# Prepare required package to distroless
+&& mkdir -p /distroless/bin /distroless/etc /distroless/etc/ssl/certs /distroless/lib \
+# Copy proxychains to distroless
+&& cp /usr/lib/$(arch)-linux-gnu/libproxychains.so.4 /distroless/lib/libproxychains.so.4 \
+&& cp /usr/lib/$(arch)-linux-gnu/libdl.so.2 /distroless/lib/libdl.so.2 \
+&& cp /usr/bin/proxychains4 /distroless/bin/proxychains \
+&& cp /etc/proxychains4.conf /distroless/etc/proxychains4.conf \
+# Copy node to distroless
+&& cp /usr/lib/$(arch)-linux-gnu/libstdc++.so.6 /distroless/lib/libstdc++.so.6 \
+&& cp /usr/lib/$(arch)-linux-gnu/libgcc_s.so.1 /distroless/lib/libgcc_s.so.1 \
+&& cp /usr/local/bin/node /distroless/bin/node \
+# Copy CA certificates to distroless
+&& cp /etc/ssl/certs/ca-certificates.crt /distroless/etc/ssl/certs/ca-certificates.crt \
 # Cleanup temp files
 && rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/*
@@ -30,53 +36,58 @@ RUN \
FROM base AS builder FROM base AS builder
ARG USE_CN_MIRROR ARG USE_CN_MIRROR
ARG NEXT_PUBLIC_BASE_PATH
ARG NEXT_PUBLIC_SENTRY_DSN
ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
ARG NEXT_PUBLIC_POSTHOG_HOST
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_ANALYTICS_UMAMI
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
ENV NEXT_PUBLIC_SERVICE_MODE="server" \ ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"
APP_URL="http://192.168.15.199:3210" \
DATABASE_DRIVER="node" \
DATABASE_URL="postgresql://postgres:uWNZugjBqixf8dxC@postgresql:5432/lobechat" \
KEY_VAULTS_SECRET="Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ="
# Sentry # Sentry
ENV NEXT_PUBLIC_SENTRY_DSN="" \ ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
SENTRY_ORG="" \ SENTRY_ORG="" \
SENTRY_PROJECT="" SENTRY_PROJECT=""
# Posthog # Posthog
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \ ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
NEXT_PUBLIC_POSTHOG_HOST="" \ NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
NEXT_PUBLIC_POSTHOG_KEY="" NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"
# Umami # Umami
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \ ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \ NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID="" NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"
# Node # Node
ENV NODE_OPTIONS="--max-old-space-size=8192" ENV NODE_OPTIONS="--max-old-space-size=8192"
WORKDIR /app WORKDIR /app
COPY package.json ./ COPY package.json pnpm-workspace.yaml ./
COPY .npmrc ./ COPY .npmrc ./
COPY packages ./packages
RUN \ RUN \
# If you want to build docker in China, build with --build-arg USE_CN_MIRROR=true # If you want to build docker in China, build with --build-arg USE_CN_MIRROR=true
if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \ if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
export SENTRYCLI_CDNURL="https://npmmirror.com/mirrors/sentry-cli"; \ export SENTRYCLI_CDNURL="https://npmmirror.com/mirrors/sentry-cli"; \
npm config set registry "https://registry.npmmirror.com/"; \ npm config set registry "https://registry.npmmirror.com/"; \
echo 'canvas_binary_host_mirror=https://npmmirror.com/mirrors/canvas' >> .npmrc; \
fi \ fi \
# Set the registry for corepack # Set the registry for corepack
&& export COREPACK_NPM_REGISTRY=$(npm config get registry | sed 's/\/$//') \ && export COREPACK_NPM_REGISTRY=$(npm config get registry | sed 's/\/$//') \
# Update corepack to latest (nodejs/corepack#612)
&& npm i -g corepack@latest \
# Enable corepack # Enable corepack
&& corepack enable \ && corepack enable \
# Use pnpm for corepack # Use pnpm for corepack
&& corepack use $(sed -n 's/.*"packageManager": "\(.*\)".*/\1/p' package.json) \ && corepack use $(sed -n 's/.*"packageManager": "\(.*\)".*/\1/p' package.json) \
# Install the dependencies # Install the dependencies
&& pnpm i \ && pnpm i
# Add sharp and db migration dependencies
&& mkdir -p /deps \
&& pnpm add sharp pg drizzle-orm --prefix /deps
COPY . . COPY . .
@@ -84,162 +95,145 @@ COPY . .
RUN npm run build:docker RUN npm run build:docker
## Application image, copy all the files for production ## Application image, copy all the files for production
FROM scratch AS app FROM busybox:latest AS app
COPY --from=builder /app/public /app/public COPY --from=base /distroless/ /
# Automatically leverage output traces to reduce image size # Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing # https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder /app/.next/standalone /app/ COPY --from=builder /app/.next/standalone /app/
COPY --from=builder /app/.next/static /app/.next/static
# copy dependencies # Copy server launcher
COPY --from=builder /deps/node_modules/.pnpm /app/node_modules/.pnpm COPY --from=builder /app/scripts/serverLauncher/startServer.js /app/startServer.js
COPY --from=builder /deps/node_modules/pg /app/node_modules/pg
COPY --from=builder /deps/node_modules/drizzle-orm /app/node_modules/drizzle-orm
# Copy database migrations RUN \
COPY --from=builder /app/src/database/server/migrations /app/migrations # Add nextjs:nodejs to run the app
COPY --from=builder /app/scripts/migrateServerDB/docker.cjs /app/docker.cjs addgroup -S -g 1001 nodejs \
COPY --from=builder /app/scripts/migrateServerDB/errorHint.js /app/errorHint.js && adduser -D -G nodejs -H -S -h /app -u 1001 nextjs \
# Set permission for nextjs:nodejs
&& chown -R nextjs:nodejs /app /etc/proxychains4.conf
## Production image, copy all the files and run next ## Production image, copy all the files and run next
FROM base FROM scratch
# Copy all the files from app, set the correct permission for prerender cache # Copy all the files from app, set the correct permission for prerender cache
COPY --from=app --chown=nextjs:nodejs /app /app COPY --from=app / /
ENV NODE_ENV="production" \ ENV NODE_ENV="production" \
NODE_TLS_REJECT_UNAUTHORIZED="" NODE_OPTIONS="--dns-result-order=ipv4first --use-openssl-ca" \
NODE_EXTRA_CA_CERTS="" \
NODE_TLS_REJECT_UNAUTHORIZED="" \
SSL_CERT_DIR="/etc/ssl/certs/ca-certificates.crt"
# Make the middleware rewrite through local as default
# refs: https://github.com/lobehub/lobe-chat/issues/5876
ENV MIDDLEWARE_REWRITE_THROUGH_LOCAL="1"
# set hostname to localhost # set hostname to localhost
ENV HOSTNAME="192.168.15.199" \ ENV HOSTNAME="0.0.0.0" \
PORT="3210" PORT="3210"
# General Variables # General Variables
ENV ACCESS_CODE="" \ ENV ACCESS_CODE="" \
APP_URL="http://192.168.15.199:3210" \
API_KEY_SELECT_MODE="" \ API_KEY_SELECT_MODE="" \
DEFAULT_AGENT_CONFIG="" \ DEFAULT_AGENT_CONFIG="" \
SYSTEM_AGENT="" \ SYSTEM_AGENT="" \
FEATURE_FLAGS="" \ FEATURE_FLAGS="" \
PROXY_URL="" PROXY_URL=""
# Database
ENV KEY_VAULTS_SECRET="Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ=" \
DATABASE_DRIVER="node" \
DATABASE_URL="postgresql://postgres:uWNZugjBqixf8dxC@postgresql:5432/lobechat"
# Next Auth
ENV NEXT_AUTH_SECRET="BIoLJoshK5E0R4BxLl4tkDedzIZkdyam/MB+vThTAT0=" \
NEXT_AUTH_SSO_PROVIDERS="casdoor" \
NEXTAUTH_URL="http://192.168.15.199:3210/api/auth"
# S3
ENV NEXT_PUBLIC_S3_DOMAIN="" \
S3_PUBLIC_DOMAIN="http://192.168.15.199:9000" \
S3_ACCESS_KEY_ID="soaucnP8Bip0TDdUjxng" \
S3_BUCKET="casdoor" \
S3_ENDPOINT="http://192.168.15.199:9000" \
S3_SECRET_ACCESS_KEY="ZPUzvY34umfcfxvWKSv0P00vczVMB6YmgJS5J9eO"
# Model Variables # Model Variables
ENV \ ENV \
# AI21 # AI21
AI21_API_KEY="" \ AI21_API_KEY="" AI21_MODEL_LIST="" \
# Ai360 # Ai360
AI360_API_KEY="" \ AI360_API_KEY="" AI360_MODEL_LIST="" \
# Anthropic # Anthropic
ANTHROPIC_API_KEY="" ANTHROPIC_PROXY_URL="" \ ANTHROPIC_API_KEY="" ANTHROPIC_MODEL_LIST="" ANTHROPIC_PROXY_URL="" \
# Amazon Bedrock # Amazon Bedrock
AWS_ACCESS_KEY_ID="" AWS_SECRET_ACCESS_KEY="" AWS_REGION="" AWS_BEDROCK_MODEL_LIST="" \ AWS_ACCESS_KEY_ID="" AWS_SECRET_ACCESS_KEY="" AWS_REGION="" AWS_BEDROCK_MODEL_LIST="" \
# Azure OpenAI # Azure OpenAI
AZURE_API_KEY="" AZURE_API_VERSION="" AZURE_ENDPOINT="" AZURE_MODEL_LIST="" \ AZURE_API_KEY="" AZURE_API_VERSION="" AZURE_ENDPOINT="" AZURE_MODEL_LIST="" \
# Baichuan # Baichuan
BAICHUAN_API_KEY="" \ BAICHUAN_API_KEY="" BAICHUAN_MODEL_LIST="" \
# Cloudflare
CLOUDFLARE_API_KEY="" CLOUDFLARE_BASE_URL_OR_ACCOUNT_ID="" CLOUDFLARE_MODEL_LIST="" \
# DeepSeek # DeepSeek
DEEPSEEK_API_KEY="" \ DEEPSEEK_API_KEY="" DEEPSEEK_MODEL_LIST="" \
# Fireworks AI # Fireworks AI
FIREWORKSAI_API_KEY="" FIREWORKSAI_MODEL_LIST="" \ FIREWORKSAI_API_KEY="" FIREWORKSAI_MODEL_LIST="" \
# Gitee AI
GITEE_AI_API_KEY="" GITEE_AI_MODEL_LIST="" \
# GitHub # GitHub
GITHUB_TOKEN="" GITHUB_MODEL_LIST="" \ GITHUB_TOKEN="" GITHUB_MODEL_LIST="" \
# Google # Google
GOOGLE_API_KEY="" GOOGLE_PROXY_URL="" \ GOOGLE_API_KEY="" GOOGLE_MODEL_LIST="" GOOGLE_PROXY_URL="" \
# Groq # Groq
GROQ_API_KEY="" GROQ_MODEL_LIST="" GROQ_PROXY_URL="" \ GROQ_API_KEY="" GROQ_MODEL_LIST="" GROQ_PROXY_URL="" \
# Higress
HIGRESS_API_KEY="" HIGRESS_MODEL_LIST="" HIGRESS_PROXY_URL="" \
# HuggingFace
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Jina
JINA_API_KEY="" JINA_MODEL_LIST="" JINA_PROXY_URL="" \
# Minimax # Minimax
MINIMAX_API_KEY="" \ MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral # Mistral
MISTRAL_API_KEY="" \ MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
# Moonshot # Moonshot
MOONSHOT_API_KEY="" MOONSHOT_PROXY_URL="" \ MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
# Novita # Novita
NOVITA_API_KEY="" NOVITA_MODEL_LIST="" \ NOVITA_API_KEY="" NOVITA_MODEL_LIST="" \
# Nvidia NIM
NVIDIA_API_KEY="" NVIDIA_MODEL_LIST="" NVIDIA_PROXY_URL="" \
# Ollama # Ollama
OLLAMA_MODEL_LIST="" OLLAMA_PROXY_URL="" \ ENABLED_OLLAMA="" OLLAMA_MODEL_LIST="" OLLAMA_PROXY_URL="" \
# OpenAI # OpenAI
OPENAI_API_KEY="" OPENAI_MODEL_LIST="" OPENAI_PROXY_URL="" \ OPENAI_API_KEY="" OPENAI_MODEL_LIST="" OPENAI_PROXY_URL="" \
# OpenRouter # OpenRouter
OPENROUTER_API_KEY="" OPENROUTER_MODEL_LIST="" \ OPENROUTER_API_KEY="" OPENROUTER_MODEL_LIST="" \
# Perplexity # Perplexity
PERPLEXITY_API_KEY="" PERPLEXITY_PROXY_URL="" \ PERPLEXITY_API_KEY="" PERPLEXITY_MODEL_LIST="" PERPLEXITY_PROXY_URL="" \
# PPIO
PPIO_API_KEY="" PPIO_MODEL_LIST="" \
# Qwen # Qwen
QWEN_API_KEY="" QWEN_MODEL_LIST="" \ QWEN_API_KEY="" QWEN_MODEL_LIST="" QWEN_PROXY_URL="" \
# SambaNova
SAMBANOVA_API_KEY="" SAMBANOVA_MODEL_LIST="" \
# SenseNova
SENSENOVA_API_KEY="" SENSENOVA_MODEL_LIST="" \
# SiliconCloud # SiliconCloud
SILICONCLOUD_API_KEY="" SILICONCLOUD_MODEL_LIST="" SILICONCLOUD_PROXY_URL="" \ SILICONCLOUD_API_KEY="" SILICONCLOUD_MODEL_LIST="" SILICONCLOUD_PROXY_URL="" \
# Spark # Spark
SPARK_API_KEY="" \ SPARK_API_KEY="" SPARK_MODEL_LIST="" \
# Stepfun # Stepfun
STEPFUN_API_KEY="" \ STEPFUN_API_KEY="" STEPFUN_MODEL_LIST="" \
# Taichu # Taichu
TAICHU_API_KEY="" \ TAICHU_API_KEY="" TAICHU_MODEL_LIST="" \
# TogetherAI # TogetherAI
TOGETHERAI_API_KEY="" TOGETHERAI_MODEL_LIST="" \ TOGETHERAI_API_KEY="" TOGETHERAI_MODEL_LIST="" \
# Upstage # Upstage
UPSTAGE_API_KEY="" \ UPSTAGE_API_KEY="" UPSTAGE_MODEL_LIST="" \
# vLLM
VLLM_API_KEY="" VLLM_MODEL_LIST="" VLLM_PROXY_URL="" \
# Wenxin
WENXIN_API_KEY="" WENXIN_MODEL_LIST="" \
# xAI
XAI_API_KEY="" XAI_MODEL_LIST="" XAI_PROXY_URL="" \
# 01.AI # 01.AI
ZEROONE_API_KEY="" ZEROONE_MODEL_LIST="" \ ZEROONE_API_KEY="" ZEROONE_MODEL_LIST="" \
# Zhipu # Zhipu
ZHIPU_API_KEY="" ZHIPU_API_KEY="" ZHIPU_MODEL_LIST="" \
# Tencent Cloud
TENCENT_CLOUD_API_KEY="" TENCENT_CLOUD_MODEL_LIST=""
USER nextjs USER nextjs
EXPOSE 3210/tcp EXPOSE 3210/tcp
CMD \ ENTRYPOINT ["/bin/node"]
if [ -n "$PROXY_URL" ]; then \
# Set regex for IPv4 CMD ["/app/startServer.js"]
IP_REGEX="^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$"; \
# Set proxychains command
PROXYCHAINS="proxychains -q"; \
# Parse the proxy URL
host_with_port="${PROXY_URL#*//}"; \
host="${host_with_port%%:*}"; \
port="${PROXY_URL##*:}"; \
protocol="${PROXY_URL%%://*}"; \
# Resolve to IP address if the host is a domain
if ! [[ "$host" =~ "$IP_REGEX" ]]; then \
nslookup=$(nslookup -q="A" "$host" | tail -n +3 | grep 'Address:'); \
if [ -n "$nslookup" ]; then \
host=$(echo "$nslookup" | tail -n 1 | awk '{print $2}'); \
fi; \
fi; \
# Generate proxychains configuration file
printf "%s\n" \
'localnet 127.0.0.0/255.0.0.0' \
'localnet ::1/128' \
'proxy_dns' \
'remote_dns_subnet 224' \
'strict_chain' \
'tcp_connect_time_out 8000' \
'tcp_read_time_out 15000' \
'[ProxyList]' \
"$protocol $host $port" \
> "/etc/proxychains4.conf"; \
fi; \
# Run migration
node "/app/docker.cjs"; \
if [ "$?" -eq "0" ]; then \
# Run the server
${PROXYCHAINS} node "/app/server.js"; \
fi;

@@ -1,5 +1,8 @@
-## Base image for all the stages
-FROM node:22-slim AS base
+## Set global build ENV
+ARG NODEJS_VERSION="22"
+## Base image for all building stages
+FROM node:${NODEJS_VERSION}-slim AS base
 ARG USE_CN_MIRROR
@@ -10,19 +13,22 @@ RUN \
 if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
 sed -i "s/deb.debian.org/mirrors.ustc.edu.cn/g" "/etc/apt/sources.list.d/debian.sources"; \
 fi \
-# Add required package & update base package
+# Add required package
 && apt update \
-&& apt install busybox proxychains-ng -qy \
-&& apt full-upgrade -qy \
-&& apt autoremove -qy --purge \
-&& apt clean -qy \
-# Configure BusyBox
-&& busybox --install -s \
-# Add nextjs:nodejs to run the app
-&& addgroup --system --gid 1001 nodejs \
-&& adduser --system --home "/app" --gid 1001 -uid 1001 nextjs \
-# Set permission for nextjs:nodejs
-&& chown -R nextjs:nodejs "/etc/proxychains4.conf" \
+&& apt install ca-certificates proxychains-ng -qy \
+# Prepare required package to distroless
+&& mkdir -p /distroless/bin /distroless/etc /distroless/etc/ssl/certs /distroless/lib \
+# Copy proxychains to distroless
+&& cp /usr/lib/$(arch)-linux-gnu/libproxychains.so.4 /distroless/lib/libproxychains.so.4 \
+&& cp /usr/lib/$(arch)-linux-gnu/libdl.so.2 /distroless/lib/libdl.so.2 \
+&& cp /usr/bin/proxychains4 /distroless/bin/proxychains \
+&& cp /etc/proxychains4.conf /distroless/etc/proxychains4.conf \
+# Copy node to distroless
+&& cp /usr/lib/$(arch)-linux-gnu/libstdc++.so.6 /distroless/lib/libstdc++.so.6 \
+&& cp /usr/lib/$(arch)-linux-gnu/libgcc_s.so.1 /distroless/lib/libgcc_s.so.1 \
+&& cp /usr/local/bin/node /distroless/bin/node \
+# Copy CA certificates to distroless
+&& cp /etc/ssl/certs/ca-certificates.crt /distroless/etc/ssl/certs/ca-certificates.crt \
 # Cleanup temp files
 && rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/*
@@ -30,53 +36,72 @@ RUN \
FROM base AS builder FROM base AS builder
ARG USE_CN_MIRROR ARG USE_CN_MIRROR
ARG NEXT_PUBLIC_BASE_PATH
ENV NEXT_PUBLIC_SERVICE_MODE="server" \ ARG NEXT_PUBLIC_SERVICE_MODE
APP_URL="http://192.168.15.199:3210" \ ARG NEXT_PUBLIC_ENABLE_NEXT_AUTH
ARG NEXT_PUBLIC_SENTRY_DSN
ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
ARG NEXT_PUBLIC_POSTHOG_HOST
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_ANALYTICS_UMAMI
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"
ENV NEXT_PUBLIC_SERVICE_MODE="${NEXT_PUBLIC_SERVICE_MODE:-server}" \
NEXT_PUBLIC_ENABLE_NEXT_AUTH="${NEXT_PUBLIC_ENABLE_NEXT_AUTH:-1}" \
APP_URL="http://app.com" \
DATABASE_DRIVER="node" \ DATABASE_DRIVER="node" \
DATABASE_URL="postgresql://postgres:uWNZugjBqixf8dxC@postgresql:5432/lobechat" \ DATABASE_URL="postgres://postgres:password@localhost:5432/postgres" \
KEY_VAULTS_SECRET="Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ=" KEY_VAULTS_SECRET="use-for-build"
# Sentry # Sentry
ENV NEXT_PUBLIC_SENTRY_DSN="" \ ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
SENTRY_ORG="" \ SENTRY_ORG="" \
SENTRY_PROJECT="" SENTRY_PROJECT=""
# Posthog # Posthog
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \ ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
NEXT_PUBLIC_POSTHOG_HOST="" \ NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
NEXT_PUBLIC_POSTHOG_KEY="" NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"
# Umami # Umami
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \ ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \ NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID="" NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"
# Node # Node
ENV NODE_OPTIONS="--max-old-space-size=8192" ENV NODE_OPTIONS="--max-old-space-size=8192"
WORKDIR /app WORKDIR /app
COPY package.json ./ COPY package.json pnpm-workspace.yaml ./
COPY .npmrc ./ COPY .npmrc ./
COPY packages ./packages
RUN \
    # If you want to build docker in China, build with --build-arg USE_CN_MIRROR=true
    if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
        export SENTRYCLI_CDNURL="https://npmmirror.com/mirrors/sentry-cli"; \
        npm config set registry "https://registry.npmmirror.com/"; \
+       echo 'canvas_binary_host_mirror=https://npmmirror.com/mirrors/canvas' >> .npmrc; \
    fi \
    # Set the registry for corepack
    && export COREPACK_NPM_REGISTRY=$(npm config get registry | sed 's/\/$//') \
+   # Update corepack to latest (nodejs/corepack#612)
+   && npm i -g corepack@latest \
    # Enable corepack
    && corepack enable \
    # Use pnpm for corepack
-   && corepack use pnpm \
+   && corepack use $(sed -n 's/.*"packageManager": "\(.*\)".*/\1/p' package.json) \
    # Install the dependencies
    && pnpm i \
-   # Add sharp and db migration dependencies
+   # Add db migration dependencies
    && mkdir -p /deps \
-   && pnpm add sharp pg drizzle-orm --prefix /deps
+   && cd /deps \
+   && pnpm init \
+   && pnpm add pg drizzle-orm
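# The /deps step above builds a small standalone package (pnpm init + pnpm add pg drizzle-orm)
# so that only the drivers required for the runtime database migration are copied into the final
# image later, instead of the application's full node_modules tree.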
COPY . .
@ -84,41 +109,57 @@ COPY . .
RUN npm run build:docker
## Application image, copy all the files for production
- FROM scratch AS app
+ FROM busybox:latest AS app
- COPY --from=builder /app/public /app/public
+ COPY --from=base /distroless/ /
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder /app/.next/standalone /app/
+ COPY --from=builder /app/.next/static /app/.next/static
+ # Copy database migrations
+ COPY --from=builder /app/src/database/migrations /app/migrations
+ COPY --from=builder /app/scripts/migrateServerDB/docker.cjs /app/docker.cjs
+ COPY --from=builder /app/scripts/migrateServerDB/errorHint.js /app/errorHint.js
# copy dependencies
COPY --from=builder /deps/node_modules/.pnpm /app/node_modules/.pnpm
COPY --from=builder /deps/node_modules/pg /app/node_modules/pg
COPY --from=builder /deps/node_modules/drizzle-orm /app/node_modules/drizzle-orm
- # Copy database migrations
- COPY --from=builder /app/src/database/server/migrations /app/migrations
- COPY --from=builder /app/scripts/migrateServerDB/docker.cjs /app/docker.cjs
- COPY --from=builder /app/scripts/migrateServerDB/errorHint.js /app/errorHint.js
+ # Copy server launcher
+ COPY --from=builder /app/scripts/serverLauncher/startServer.js /app/startServer.js
+ RUN \
+     # Add nextjs:nodejs to run the app
+     addgroup -S -g 1001 nodejs \
+     && adduser -D -G nodejs -H -S -h /app -u 1001 nextjs \
+     # Set permission for nextjs:nodejs
+     && chown -R nextjs:nodejs /app /etc/proxychains4.conf
## Production image, copy all the files and run next
- FROM base
+ FROM scratch
# Copy all the files from app, set the correct permission for prerender cache
- COPY --from=app --chown=nextjs:nodejs /app /app
+ COPY --from=app / /
ENV NODE_ENV="production" \
- NODE_TLS_REJECT_UNAUTHORIZED=""
+ NODE_OPTIONS="--dns-result-order=ipv4first --use-openssl-ca" \
+ NODE_EXTRA_CA_CERTS="" \
+ NODE_TLS_REJECT_UNAUTHORIZED="" \
+ SSL_CERT_DIR="/etc/ssl/certs/ca-certificates.crt"
+ # Make the middleware rewrite through local as default
+ # refs: https://github.com/lobehub/lobe-chat/issues/5876
+ ENV MIDDLEWARE_REWRITE_THROUGH_LOCAL="1"
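# --use-openssl-ca makes Node.js load its trust store through OpenSSL, so the ca-certificates.crt
# bundle copied into /etc/ssl/certs is what validates outbound TLS in this scratch-based image.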
# set hostname to localhost
- ENV HOSTNAME="192.168.15.199" \
+ ENV HOSTNAME="0.0.0.0" \
PORT="3210"
# General Variables
ENV ACCESS_CODE="" \
- APP_URL="http://192.168.15.199:3210" \
+ APP_URL="" \
API_KEY_SELECT_MODE="" \
DEFAULT_AGENT_CONFIG="" \
SYSTEM_AGENT="" \
@ -126,120 +167,116 @@ ENV ACCESS_CODE="" \
PROXY_URL=""
# Database
- ENV KEY_VAULTS_SECRET="Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ=" \
+ ENV KEY_VAULTS_SECRET="" \
DATABASE_DRIVER="node" \
- DATABASE_URL="postgresql://postgres:uWNZugjBqixf8dxC@postgresql:5432/lobechat"
+ DATABASE_URL=""
# Next Auth
- ENV NEXT_AUTH_SECRET="NX2kaPE923dt6BL2U8e9oSre5RfoT7hg" \
+ ENV NEXT_AUTH_SECRET="" \
- NEXT_AUTH_SSO_PROVIDERS="casdoor" \
+ NEXT_AUTH_SSO_PROVIDERS="" \
- NEXTAUTH_URL="http://192.168.15.199:3210/api/auth"
+ NEXTAUTH_URL=""
# S3
ENV NEXT_PUBLIC_S3_DOMAIN="" \
- S3_PUBLIC_DOMAIN="http://192.168.15.199:9000" \
+ S3_PUBLIC_DOMAIN="" \
- S3_ACCESS_KEY_ID="soaucnP8Bip0TDdUjxng" \
+ S3_ACCESS_KEY_ID="" \
- S3_BUCKET="casdoor" \
+ S3_BUCKET="" \
- S3_ENDPOINT="http://192.168.15.199:9000" \
+ S3_ENDPOINT="" \
- S3_SECRET_ACCESS_KEY="ZPUzvY34umfcfxvWKSv0P00vczVMB6YmgJS5J9eO"
+ S3_SECRET_ACCESS_KEY=""
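# KEY_VAULTS_SECRET and NEXT_AUTH_SECRET are intentionally blank in the merged file; a random
# value (for example the output of `openssl rand -base64 32`) is expected to be injected when
# the container is started, rather than being baked into the image as in the removed lines.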
# Model Variables
ENV \
# AI21
- AI21_API_KEY="" \
+ AI21_API_KEY="" AI21_MODEL_LIST="" \
# Ai360
- AI360_API_KEY="" \
+ AI360_API_KEY="" AI360_MODEL_LIST="" \
# Anthropic
- ANTHROPIC_API_KEY="" ANTHROPIC_PROXY_URL="" \
+ ANTHROPIC_API_KEY="" ANTHROPIC_MODEL_LIST="" ANTHROPIC_PROXY_URL="" \
# Amazon Bedrock
AWS_ACCESS_KEY_ID="" AWS_SECRET_ACCESS_KEY="" AWS_REGION="" AWS_BEDROCK_MODEL_LIST="" \
# Azure OpenAI
AZURE_API_KEY="" AZURE_API_VERSION="" AZURE_ENDPOINT="" AZURE_MODEL_LIST="" \
# Baichuan
- BAICHUAN_API_KEY="" \
+ BAICHUAN_API_KEY="" BAICHUAN_MODEL_LIST="" \
+ # Cloudflare
+ CLOUDFLARE_API_KEY="" CLOUDFLARE_BASE_URL_OR_ACCOUNT_ID="" CLOUDFLARE_MODEL_LIST="" \
# DeepSeek
- DEEPSEEK_API_KEY="" \
+ DEEPSEEK_API_KEY="" DEEPSEEK_MODEL_LIST="" \
# Fireworks AI
FIREWORKSAI_API_KEY="" FIREWORKSAI_MODEL_LIST="" \
+ # Gitee AI
+ GITEE_AI_API_KEY="" GITEE_AI_MODEL_LIST="" \
# GitHub
GITHUB_TOKEN="" GITHUB_MODEL_LIST="" \
# Google
- GOOGLE_API_KEY="" GOOGLE_PROXY_URL="" \
+ GOOGLE_API_KEY="" GOOGLE_MODEL_LIST="" GOOGLE_PROXY_URL="" \
# Groq
GROQ_API_KEY="" GROQ_MODEL_LIST="" GROQ_PROXY_URL="" \
+ # Higress
+ HIGRESS_API_KEY="" HIGRESS_MODEL_LIST="" HIGRESS_PROXY_URL="" \
+ # HuggingFace
+ HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
+ # Hunyuan
+ HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
+ # InternLM
+ INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
+ # Jina
+ JINA_API_KEY="" JINA_MODEL_LIST="" JINA_PROXY_URL="" \
# Minimax
- MINIMAX_API_KEY="" \
+ MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
- MISTRAL_API_KEY="" \
+ MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
# Moonshot
- MOONSHOT_API_KEY="" MOONSHOT_PROXY_URL="" \
+ MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
# Novita
NOVITA_API_KEY="" NOVITA_MODEL_LIST="" \
+ # Nvidia NIM
+ NVIDIA_API_KEY="" NVIDIA_MODEL_LIST="" NVIDIA_PROXY_URL="" \
# Ollama
- OLLAMA_MODEL_LIST="" OLLAMA_PROXY_URL="" \
+ ENABLED_OLLAMA="" OLLAMA_MODEL_LIST="" OLLAMA_PROXY_URL="" \
# OpenAI
OPENAI_API_KEY="" OPENAI_MODEL_LIST="" OPENAI_PROXY_URL="" \
# OpenRouter
OPENROUTER_API_KEY="" OPENROUTER_MODEL_LIST="" \
# Perplexity
- PERPLEXITY_API_KEY="" PERPLEXITY_PROXY_URL="" \
+ PERPLEXITY_API_KEY="" PERPLEXITY_MODEL_LIST="" PERPLEXITY_PROXY_URL="" \
+ # PPIO
+ PPIO_API_KEY="" PPIO_MODEL_LIST="" \
# Qwen
- QWEN_API_KEY="" QWEN_MODEL_LIST="" \
+ QWEN_API_KEY="" QWEN_MODEL_LIST="" QWEN_PROXY_URL="" \
+ # SambaNova
+ SAMBANOVA_API_KEY="" SAMBANOVA_MODEL_LIST="" \
+ # SenseNova
+ SENSENOVA_API_KEY="" SENSENOVA_MODEL_LIST="" \
# SiliconCloud
SILICONCLOUD_API_KEY="" SILICONCLOUD_MODEL_LIST="" SILICONCLOUD_PROXY_URL="" \
# Spark
- SPARK_API_KEY="" \
+ SPARK_API_KEY="" SPARK_MODEL_LIST="" \
# Stepfun
- STEPFUN_API_KEY="" \
+ STEPFUN_API_KEY="" STEPFUN_MODEL_LIST="" \
# Taichu
- TAICHU_API_KEY="" \
+ TAICHU_API_KEY="" TAICHU_MODEL_LIST="" \
# TogetherAI
TOGETHERAI_API_KEY="" TOGETHERAI_MODEL_LIST="" \
# Upstage
- UPSTAGE_API_KEY="" \
+ UPSTAGE_API_KEY="" UPSTAGE_MODEL_LIST="" \
+ # vLLM
+ VLLM_API_KEY="" VLLM_MODEL_LIST="" VLLM_PROXY_URL="" \
+ # Wenxin
+ WENXIN_API_KEY="" WENXIN_MODEL_LIST="" \
+ # xAI
+ XAI_API_KEY="" XAI_MODEL_LIST="" XAI_PROXY_URL="" \
# 01.AI
ZEROONE_API_KEY="" ZEROONE_MODEL_LIST="" \
# Zhipu
- ZHIPU_API_KEY=""
+ ZHIPU_API_KEY="" ZHIPU_MODEL_LIST="" \
+ # Tencent Cloud
+ TENCENT_CLOUD_API_KEY="" TENCENT_CLOUD_MODEL_LIST=""
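# Every provider credential above defaults to an empty string; real keys are supplied at run time
# (for example with `docker run -e OPENAI_API_KEY=...` or an env file), so nothing secret is
# baked into the published image.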
USER nextjs
EXPOSE 3210/tcp
- CMD \
-   if [ -n "$PROXY_URL" ]; then \
-     # Set regex for IPv4
-     IP_REGEX="^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$"; \
-     # Set proxychains command
-     PROXYCHAINS="proxychains -q"; \
-     # Parse the proxy URL
-     host_with_port="${PROXY_URL#*//}"; \
-     host="${host_with_port%%:*}"; \
-     port="${PROXY_URL##*:}"; \
-     protocol="${PROXY_URL%%://*}"; \
-     # Resolve to IP address if the host is a domain
-     if ! [[ "$host" =~ "$IP_REGEX" ]]; then \
-       nslookup=$(nslookup -q="A" "$host" | tail -n +3 | grep 'Address:'); \
-       if [ -n "$nslookup" ]; then \
-         host=$(echo "$nslookup" | tail -n 1 | awk '{print $2}'); \
-       fi; \
-     fi; \
-     # Generate proxychains configuration file
-     printf "%s\n" \
-       'localnet 127.0.0.0/255.0.0.0' \
-       'localnet ::1/128' \
-       'proxy_dns' \
-       'remote_dns_subnet 224' \
-       'strict_chain' \
-       'tcp_connect_time_out 8000' \
-       'tcp_read_time_out 15000' \
-       '[ProxyList]' \
-       "$protocol $host $port" \
-       > "/etc/proxychains4.conf"; \
-   fi; \
-   # Run migration
-   node "/app/docker.cjs"; \
-   if [ "$?" -eq "0" ]; then \
-     # Run the server
-     ${PROXYCHAINS} node "/app/server.js"; \
-   fi;
+ ENTRYPOINT ["/bin/node"]
+ CMD ["/app/startServer.js"]
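The start-up logic that used to live in the removed CMD above (resolving PROXY_URL, writing /etc/proxychains4.conf, running the migration, then launching server.js) is replaced by the copied startServer.js launcher, so all runtime configuration is now passed when the container is started. A minimal sketch of such a run; the image tag and every value below are placeholders, not taken from this diff:

```fish
$ docker run -d -p 3210:3210 \
  -e APP_URL=https://chat.example.com \
  -e DATABASE_URL=postgresql://postgres:secret@postgres:5432/lobechat \
  -e KEY_VAULTS_SECRET=$(openssl rand -base64 32) \
  -e NEXT_AUTH_SECRET=$(openssl rand -base64 32) \
  -e PROXY_URL=socks5://proxy.example.com:1080 \
  --name lobe-chat-database \
  my-registry/lobe-chat-database:latest
```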

@ -0,0 +1,238 @@
## Set global build ENV
ARG NODEJS_VERSION="22"
## Base image for all building stages
FROM node:${NODEJS_VERSION}-slim AS base
ARG USE_CN_MIRROR
ENV DEBIAN_FRONTEND="noninteractive"
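# The RUN step below assembles a minimal /distroless tree: the node binary, proxychains-ng, the CA
# certificate bundle, and the shared libraries node links against (ldd /usr/local/bin/node lists
# them). Later stages copy this tree into the shell-less runtime image.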
RUN \
# If you want to build docker in China, build with --build-arg USE_CN_MIRROR=true
if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
sed -i "s/deb.debian.org/mirrors.ustc.edu.cn/g" "/etc/apt/sources.list.d/debian.sources"; \
fi \
# Add required package
&& apt update \
&& apt install ca-certificates proxychains-ng -qy \
# Prepare required package to distroless
&& mkdir -p /distroless/bin /distroless/etc /distroless/etc/ssl/certs /distroless/lib \
# Copy proxychains to distroless
&& cp /usr/lib/$(arch)-linux-gnu/libproxychains.so.4 /distroless/lib/libproxychains.so.4 \
&& cp /usr/lib/$(arch)-linux-gnu/libdl.so.2 /distroless/lib/libdl.so.2 \
&& cp /usr/bin/proxychains4 /distroless/bin/proxychains \
&& cp /etc/proxychains4.conf /distroless/etc/proxychains4.conf \
# Copy node to distroless
&& cp /usr/lib/$(arch)-linux-gnu/libstdc++.so.6 /distroless/lib/libstdc++.so.6 \
&& cp /usr/lib/$(arch)-linux-gnu/libgcc_s.so.1 /distroless/lib/libgcc_s.so.1 \
&& cp /usr/local/bin/node /distroless/bin/node \
# Copy CA certificates to distroless
&& cp /etc/ssl/certs/ca-certificates.crt /distroless/etc/ssl/certs/ca-certificates.crt \
# Cleanup temp files
&& rm -rf /tmp/* /var/lib/apt/lists/* /var/tmp/*
## Builder image, install all the dependencies and build the app
FROM base AS builder
ARG USE_CN_MIRROR
ARG NEXT_PUBLIC_BASE_PATH
ARG NEXT_PUBLIC_SENTRY_DSN
ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
ARG NEXT_PUBLIC_POSTHOG_HOST
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_ANALYTICS_UMAMI
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID
ENV NEXT_PUBLIC_CLIENT_DB="pglite"
ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"
# Sentry
ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
SENTRY_ORG="" \
SENTRY_PROJECT=""
# Posthog
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"
# Umami
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"
# Node
ENV NODE_OPTIONS="--max-old-space-size=8192"
WORKDIR /app
COPY package.json pnpm-workspace.yaml ./
COPY .npmrc ./
COPY packages ./packages
RUN \
# If you want to build docker in China, build with --build-arg USE_CN_MIRROR=true
if [ "${USE_CN_MIRROR:-false}" = "true" ]; then \
export SENTRYCLI_CDNURL="https://npmmirror.com/mirrors/sentry-cli"; \
npm config set registry "https://registry.npmmirror.com/"; \
echo 'canvas_binary_host_mirror=https://npmmirror.com/mirrors/canvas' >> .npmrc; \
fi \
# Set the registry for corepack
&& export COREPACK_NPM_REGISTRY=$(npm config get registry | sed 's/\/$//') \
# Update corepack to latest (nodejs/corepack#612)
&& npm i -g corepack@latest \
# Enable corepack
&& corepack enable \
# Use pnpm for corepack
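# The sed expression below reads the pinned "packageManager" field from package.json
# (e.g. a value like "pnpm@9.x"), so corepack activates exactly the pnpm version the repository declares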
&& corepack use $(sed -n 's/.*"packageManager": "\(.*\)".*/\1/p' package.json) \
# Install the dependencies
&& pnpm i
COPY . .
# run build standalone for docker version
RUN npm run build:docker
## Application image, copy all the files for production
FROM busybox:latest AS app
COPY --from=base /distroless/ /
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder /app/.next/standalone /app/
# Copy server launcher
COPY --from=builder /app/scripts/serverLauncher/startServer.js /app/startServer.js
RUN \
# Add nextjs:nodejs to run the app
addgroup -S -g 1001 nodejs \
&& adduser -D -G nodejs -H -S -h /app -u 1001 nextjs \
# Set permission for nextjs:nodejs
&& chown -R nextjs:nodejs /app /etc/proxychains4.conf
## Production image, copy all the files and run next
FROM scratch
# Copy all the files from app, set the correct permission for prerender cache
COPY --from=app / /
ENV NODE_ENV="production" \
NODE_OPTIONS="--dns-result-order=ipv4first --use-openssl-ca" \
NODE_EXTRA_CA_CERTS="" \
NODE_TLS_REJECT_UNAUTHORIZED="" \
SSL_CERT_DIR="/etc/ssl/certs/ca-certificates.crt"
# Make the middleware rewrite through local as default
# refs: https://github.com/lobehub/lobe-chat/issues/5876
ENV MIDDLEWARE_REWRITE_THROUGH_LOCAL="1"
# set hostname to localhost
ENV HOSTNAME="0.0.0.0" \
PORT="3210"
# General Variables
ENV ACCESS_CODE="" \
API_KEY_SELECT_MODE="" \
DEFAULT_AGENT_CONFIG="" \
SYSTEM_AGENT="" \
FEATURE_FLAGS="" \
PROXY_URL=""
# Model Variables
ENV \
# AI21
AI21_API_KEY="" AI21_MODEL_LIST="" \
# Ai360
AI360_API_KEY="" AI360_MODEL_LIST="" \
# Anthropic
ANTHROPIC_API_KEY="" ANTHROPIC_MODEL_LIST="" ANTHROPIC_PROXY_URL="" \
# Amazon Bedrock
AWS_ACCESS_KEY_ID="" AWS_SECRET_ACCESS_KEY="" AWS_REGION="" AWS_BEDROCK_MODEL_LIST="" \
# Azure OpenAI
AZURE_API_KEY="" AZURE_API_VERSION="" AZURE_ENDPOINT="" AZURE_MODEL_LIST="" \
# Baichuan
BAICHUAN_API_KEY="" BAICHUAN_MODEL_LIST="" \
# Cloudflare
CLOUDFLARE_API_KEY="" CLOUDFLARE_BASE_URL_OR_ACCOUNT_ID="" CLOUDFLARE_MODEL_LIST="" \
# DeepSeek
DEEPSEEK_API_KEY="" DEEPSEEK_MODEL_LIST="" \
# Fireworks AI
FIREWORKSAI_API_KEY="" FIREWORKSAI_MODEL_LIST="" \
# Gitee AI
GITEE_AI_API_KEY="" GITEE_AI_MODEL_LIST="" \
# GitHub
GITHUB_TOKEN="" GITHUB_MODEL_LIST="" \
# Google
GOOGLE_API_KEY="" GOOGLE_MODEL_LIST="" GOOGLE_PROXY_URL="" \
# Groq
GROQ_API_KEY="" GROQ_MODEL_LIST="" GROQ_PROXY_URL="" \
# Higress
HIGRESS_API_KEY="" HIGRESS_MODEL_LIST="" HIGRESS_PROXY_URL="" \
# HuggingFace
HUGGINGFACE_API_KEY="" HUGGINGFACE_MODEL_LIST="" HUGGINGFACE_PROXY_URL="" \
# Hunyuan
HUNYUAN_API_KEY="" HUNYUAN_MODEL_LIST="" \
# InternLM
INTERNLM_API_KEY="" INTERNLM_MODEL_LIST="" \
# Jina
JINA_API_KEY="" JINA_MODEL_LIST="" JINA_PROXY_URL="" \
# Minimax
MINIMAX_API_KEY="" MINIMAX_MODEL_LIST="" \
# Mistral
MISTRAL_API_KEY="" MISTRAL_MODEL_LIST="" \
# Moonshot
MOONSHOT_API_KEY="" MOONSHOT_MODEL_LIST="" MOONSHOT_PROXY_URL="" \
# Novita
NOVITA_API_KEY="" NOVITA_MODEL_LIST="" \
# Nvidia NIM
NVIDIA_API_KEY="" NVIDIA_MODEL_LIST="" NVIDIA_PROXY_URL="" \
# Ollama
ENABLED_OLLAMA="" OLLAMA_MODEL_LIST="" OLLAMA_PROXY_URL="" \
# OpenAI
OPENAI_API_KEY="" OPENAI_MODEL_LIST="" OPENAI_PROXY_URL="" \
# OpenRouter
OPENROUTER_API_KEY="" OPENROUTER_MODEL_LIST="" \
# Perplexity
PERPLEXITY_API_KEY="" PERPLEXITY_MODEL_LIST="" PERPLEXITY_PROXY_URL="" \
# Qwen
QWEN_API_KEY="" QWEN_MODEL_LIST="" QWEN_PROXY_URL="" \
# SambaNova
SAMBANOVA_API_KEY="" SAMBANOVA_MODEL_LIST="" \
# SenseNova
SENSENOVA_API_KEY="" SENSENOVA_MODEL_LIST="" \
# SiliconCloud
SILICONCLOUD_API_KEY="" SILICONCLOUD_MODEL_LIST="" SILICONCLOUD_PROXY_URL="" \
# Spark
SPARK_API_KEY="" SPARK_MODEL_LIST="" \
# Stepfun
STEPFUN_API_KEY="" STEPFUN_MODEL_LIST="" \
# Taichu
TAICHU_API_KEY="" TAICHU_MODEL_LIST="" \
# TogetherAI
TOGETHERAI_API_KEY="" TOGETHERAI_MODEL_LIST="" \
# Upstage
UPSTAGE_API_KEY="" UPSTAGE_MODEL_LIST="" \
# vLLM
VLLM_API_KEY="" VLLM_MODEL_LIST="" VLLM_PROXY_URL="" \
# Wenxin
WENXIN_API_KEY="" WENXIN_MODEL_LIST="" \
# xAI
XAI_API_KEY="" XAI_MODEL_LIST="" XAI_PROXY_URL="" \
# 01.AI
ZEROONE_API_KEY="" ZEROONE_MODEL_LIST="" \
# Zhipu
ZHIPU_API_KEY="" ZHIPU_MODEL_LIST="" \
# Tencent Cloud
TENCENT_CLOUD_API_KEY="" TENCENT_CLOUD_MODEL_LIST=""
USER nextjs
EXPOSE 3210/tcp
ENTRYPOINT ["/bin/node"]
CMD ["/app/startServer.js"]
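The build-time knobs in this file are plain build arguments; a minimal local build might look like the following sketch (the file name, tag and argument values are illustrative only, not taken from this diff):

```fish
$ docker build . -f Dockerfile \
  --build-arg USE_CN_MIRROR=true \
  --build-arg NEXT_PUBLIC_BASE_PATH=/lobe \
  -t lobe-chat-pglite:local
$ docker run -d -p 3210:3210 --name lobe-chat lobe-chat-pglite:local
```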

@ -1,809 +0,0 @@
<div align="center"><a name="readme-top"></a>
[![][image-banner]][vercel-link]
# Lobe Chat
オープンソースのモダンデザインChatGPT/LLMs UI/フレームワーク。<br/>
音声合成、マルチモーダル、拡張可能な([function call][docs-functionc-call])プラグインシステムをサポート。<br/>
プライベートなOpenAI ChatGPT/Claude/Gemini/Groq/Ollamaチャットアプリケーションをワンクリックで**無料**でデプロイ。
[English](./README.md) · [简体中文](./README.zh-CN.md) · **日本語** · [公式サイト][official-site] · [変更履歴](./CHANGELOG.md) · [ドキュメント][docs] · [ブログ][blog] · [フィードバック][github-issues-link]
<!-- SHIELD GROUP -->
[![][github-release-shield]][github-release-link]
[![][docker-release-shield]][docker-release-link]
[![][vercel-shield]][vercel-link]
[![][discord-shield]][discord-link]<br/>
[![][codecov-shield]][codecov-link]
[![][github-action-test-shield]][github-action-test-link]
[![][github-action-release-shield]][github-action-release-link]
[![][github-releasedate-shield]][github-releasedate-link]<br/>
[![][github-contributors-shield]][github-contributors-link]
[![][github-forks-shield]][github-forks-link]
[![][github-stars-shield]][github-stars-link]
[![][github-issues-shield]][github-issues-link]
[![][github-license-shield]][github-license-link]<br>
[![][sponsor-shield]][sponsor-link]
**LobeChatリポジトリを共有**
[![][share-x-shield]][share-x-link]
[![][share-telegram-shield]][share-telegram-link]
[![][share-whatsapp-shield]][share-whatsapp-link]
[![][share-reddit-shield]][share-reddit-link]
[![][share-weibo-shield]][share-weibo-link]
[![][share-mastodon-shield]][share-mastodon-link]
[![][share-linkedin-shield]][share-linkedin-link]
<sup>新しい時代の思考と創造を先導します。あなたのために、スーパー個人のために作られました。</sup>
[![][github-trending-shield]][github-trending-url]
[![][image-overview]][vercel-link]
</div>
<details>
<summary><kbd>目次</kbd></summary>
#### TOC
- [👋🏻 はじめに & コミュニティに参加](#-はじめに--コミュニティに参加)
- [✨ 特徴](#-特徴)
- [`1` マルチモデルサービスプロバイダーのサポート](#1-マルチモデルサービスプロバイダーのサポート)
- [`2` ローカル大規模言語モデル (LLM) のサポート](#2-ローカル大規模言語モデル-llm-のサポート)
- [`3` モデルの視覚認識](#3-モデルの視覚認識)
- [`4` TTS & STT 音声会話](#4-tts--stt-音声会話)
- [`5` テキストから画像生成](#5-テキストから画像生成)
- [`6` プラグインシステム (Function Calling)](#6-プラグインシステム-function-calling)
- [`7` エージェントマーケット (GPTs)](#7-エージェントマーケット-gpts)
- [`8` ローカル / リモートデータベースのサポート](#8-ローカル--リモートデータベースのサポート)
- [`9` マルチユーザ管理のサポート](#9-マルチユーザ管理のサポート)
- [`10` プログレッシブウェブアプリ (PWA)](#10-プログレッシブウェブアプリ-pwa)
- [`11` モバイルデバイスの適応](#11-モバイルデバイスの適応)
- [`12` カスタムテーマ](#12-カスタムテーマ)
- [`*` その他の特徴](#-その他の特徴)
- [⚡️ パフォーマンス](#-パフォーマンス)
- [🛳 自己ホスティング](#-自己ホスティング)
- [`A` Vercel、Zeabur、Sealosでのデプロイ](#a-vercelzeabursealosでのデプロイ)
- [`B` Dockerでのデプロイ](#b-dockerでのデプロイ)
- [環境変数](#環境変数)
- [📦 エコシステム](#-エコシステム)
- [🧩 プラグイン](#-プラグイン)
- [⌨️ ローカル開発](#-ローカル開発)
- [🤝 貢献](#-貢献)
- [❤️ スポンサー](#-スポンサー)
- [🔗 その他の製品](#-その他の製品)
####
<br/>
</details>
## 👋🏻 はじめに & コミュニティに参加
私たちは、AIGCのためのモダンデザインコンポーネントとツールを提供することを目指すデザインエンジニアのグループです。
ブートストラッピングアプローチを採用することで、開発者とユーザーに対してよりオープンで透明性のある、使いやすい製品エコシステムを提供することを目指しています。
ユーザーやプロの開発者にとって、LobeHubはあなたのAIエージェントの遊び場となるでしょう。LobeChatは現在アクティブに開発中であり、遭遇した[問題][issues-link]についてのフィードバックを歓迎します。
| [![][vercel-shield-badge]][vercel-link] | インストールや登録は不要です!私たちのウェブサイトにアクセスして、直接体験してください。 |
| :---------------------------------------- | :----------------------------------------------------------------------------------------------------------------- |
| [![][discord-shield-badge]][discord-link] | 私たちのDiscordコミュニティに参加しましょうここでは、LobeHubの開発者や他の熱心なユーザーとつながることができます。 |
> \[!IMPORTANT]
>
> **スターを付けてください**。GitHubからのすべてのリリース通知を遅延なく受け取ることができます\~ ⭐️
[![][image-star]][github-stars-link]
<details>
<summary><kbd>スター履歴</kbd></summary>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=lobehub%2Flobe-chat&theme=dark&type=Date">
<img width="100%" src="https://api.star-history.com/svg?repos=lobehub%2Flobe-chat&type=Date">
</picture>
</details>
## ✨ 特徴
[![][image-feat-privoder]][docs-feat-provider]
### `1` [マルチモデルサービスプロバイダーのサポート][docs-feat-provider]
LobeChatの継続的な開発において、AI会話サービスを提供する際のモデルサービスプロバイダーの多様性がコミュニティのニーズを満たすために重要であることを深く理解しています。そのため、単一のモデルサービスプロバイダーに限定せず、複数のモデルサービスプロバイダーをサポートすることで、ユーザーにより多様で豊富な会話の選択肢を提供しています。
このようにして、LobeChatは異なるユーザーのニーズにより柔軟に対応し、開発者にも幅広い選択肢を提供します。
#### サポートされているモデルサービスプロバイダー
以下のモデルサービスプロバイダーをサポートしています:
- **AWS Bedrock**AWS Bedrockサービスと統合され、**Claude / LLama2**などのモデルをサポートし、強力な自然言語処理能力を提供します。[詳細はこちら](https://aws.amazon.com/cn/bedrock)
- **Anthropic (Claude)**Anthropicの**Claude**シリーズモデルにアクセスし、Claude 3およびClaude 2を含む、マルチモーダル機能と拡張コンテキストで業界の新しいベンチマークを設定します。[詳細はこちら](https://www.anthropic.com/claude)
- **Google AI (Gemini Pro, Gemini Vision)**Googleの**Gemini**シリーズモデルにアクセスし、GeminiおよびGemini Proを含む、高度な言語理解と生成をサポートします。[詳細はこちら](https://deepmind.google/technologies/gemini/)
- **Groq**GroqのAIモデルにアクセスし、メッセージシーケンスを効率的に処理し、応答を生成し、マルチターンの対話や単一のインタラクションタスクを実行できます。[詳細はこちら](https://groq.com/)
- **OpenRouter****Claude 3**、**Gemma**、**Mistral**、**Llama2**、**Cohere**などのモデルのルーティングをサポートし、インテリジェントなルーティング最適化をサポートし、使用効率を向上させ、オープンで柔軟です。[詳細はこちら](https://openrouter.ai/)
- **01.AI (Yi Model)**01.AIモデルを統合し、推論速度が速いAPIシリーズを提供し、処理時間を短縮しながら優れたモデル性能を維持します。[詳細はこちら](https://01.ai/)
- **Together.ai**Together Inference APIを通じて、100以上の主要なオープンソースのチャット、言語、画像、コード、および埋め込みモデルにアクセスできます。これらのモデルについては、使用した分だけ支払います。[詳細はこちら](https://www.together.ai/)
- **ChatGLM**:智谱の**ChatGLM**シリーズモデルGLM-4/GLM-4-vision/GLM-3-turboを追加し、ユーザーにもう一つの効率的な会話モデルの選択肢を提供します。[詳細はこちら](https://www.zhipuai.cn/)
- **Moonshot AI (Dark Side of the Moon)**中国の革新的なAIスタートアップであるMoonshotシリーズモデルと統合し、より深い会話理解を提供します。[詳細はこちら](https://www.moonshot.cn/)
- **Minimax**Minimaxモデルを統合し、MoEモデル**abab6**を含む、より広範な選択肢を提供します。[詳細はこちら](https://www.minimaxi.com/)
- **DeepSeek**中国の革新的なAIスタートアップであるDeepSeekシリーズモデルと統合し、性能と価格のバランスを取ったモデルを提供します。[詳細はこちら](https://www.deepseek.com/)
- **Qwen**Qwenシリーズモデルを統合し、最新の**qwen-turbo**、**qwen-plus**、**qwen-max**を含む。[詳細はこちら](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction)
- **Novita AI****Llama**、**Mistral**、その他の主要なオープンソースモデルに最安値でアクセスできます。検閲されないロールプレイに参加し、創造的な議論を引き起こし、制限のないイノベーションを促進します。**使用した分だけ支払います。** [詳細はこちら](https://novita.ai/llm-api?utm_source=lobechat&utm_medium=ch&utm_campaign=api)
同時に、ReplicateやPerplexityなどのモデルサービスプロバイダーのサポートも計画しています。これにより、サービスプロバイダーのライブラリをさらに充実させることができます。LobeChatがあなたのお気に入りのサービスプロバイダーをサポートすることを希望する場合は、[コミュニティディスカッション](https://github.com/lobehub/lobe-chat/discussions/1284)に参加してください。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-local]][docs-feat-local]
### `2` [ローカル大規模言語モデル (LLM) のサポート][docs-feat-local]
特定のユーザーのニーズに応えるために、LobeChatは[Ollama](https://ollama.ai)に基づいてローカルモデルの使用をサポートしており、ユーザーが自分自身またはサードパーティのモデルを柔軟に使用できるようにしています。
> \[!TIP]
>
> [📘 LobeChatでのOllamaの使用][docs-usage-ollama]について詳しくはこちらをご覧ください。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-vision]][docs-feat-vision]
### `3` [モデルの視覚認識][docs-feat-vision]
LobeChatは、OpenAIの最新の視覚認識機能を備えた[`gpt-4-vision`](https://platform.openai.com/docs/guides/vision)モデルをサポートしています。
これは視覚を認識できるマルチモーダルインテリジェンスです。ユーザーは簡単に画像をアップロードしたり、画像をドラッグアンドドロップして対話ボックスに入れることができ、
エージェントは画像の内容を認識し、これに基づいてインテリジェントな会話を行い、よりスマートで多様なチャットシナリオを作成します。
この機能は、新しいインタラクティブな方法を提供し、コミュニケーションがテキストを超えて視覚要素を含むことを可能にします。
日常の使用での画像共有や特定の業界での画像解釈に関係なく、エージェントは優れた会話体験を提供します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-tts]][docs-feat-tts]
### `4` [TTS & STT 音声会話][docs-feat-tts]
LobeChatは、テキストから音声への変換Text-to-Speech、TTSおよび音声からテキストへの変換Speech-to-Text、STT技術をサポートしており、
テキストメッセージを明瞭な音声出力に変換し、ユーザーが実際の人と話しているかのように対話エージェントと対話できるようにします。
ユーザーは、エージェントに適した音声を選択することができます。
さらに、TTSは聴覚学習を好む人や忙しい中で情報を受け取りたい人にとって優れたソリューションを提供します。
LobeChatでは、異なる地域や文化的背景のユーザーのニーズに応えるために、さまざまな高品質の音声オプションOpenAI Audio、Microsoft Edge Speechを慎重に選択しました。
ユーザーは、個人の好みや特定のシナリオに応じて適切な音声を選択し、パーソナライズされたコミュニケーション体験を得ることができます。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-t2i]][docs-feat-t2i]
### `5` [テキストから画像生成][docs-feat-t2i]
最新のテキストから画像生成技術をサポートし、LobeChatはユーザーがエージェントとの対話中に直接画像作成ツールを呼び出すことができるようになりました。
[`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/)、[`Pollinations`](https://pollinations.ai/)などのAIツールの能力を活用することで、
エージェントはあなたのアイデアを画像に変えることができます。
これにより、プライベートで没入感のある創造プロセスが可能になり、個人的な対話に視覚的なストーリーテリングをシームレスに統合することができます。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-plugin]][docs-feat-plugin]
### `6` [プラグインシステム (Function Calling)][docs-feat-plugin]
LobeChatのプラグインエコシステムは、そのコア機能の重要な拡張であり、LobeChatアシスタントの実用性と柔軟性を大幅に向上させます。
<video controls src="https://github.com/lobehub/lobe-chat/assets/28616219/f29475a3-f346-4196-a435-41a6373ab9e2" muted="false"></video>
プラグインを利用することで、LobeChatアシスタントはリアルタイムの情報を取得して処理することができ、ウェブ情報を検索し、ユーザーに即時かつ関連性の高いニュースを提供することができます。
さらに、これらのプラグインはニュースの集約に限定されず、他の実用的な機能にも拡張できます。たとえば、ドキュメントの迅速な検索、画像の生成、Bilibili、Steamなどのさまざまなプラットフォームからのデータの取得、さまざまなサードパーティサービスとの連携などです。
> \[!TIP]
>
> [📘 プラグインの使用][docs-usage-plugin]について詳しくはこちらをご覧ください。
<!-- PLUGIN LIST -->
| 最近の提出 | 説明 |
| ------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- |
| [ショッピングツール](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **shoppingtools** on **2024-07-19**</sup> | eBayとAliExpressで製品を検索し、eBayのイベントとクーポンを見つけます。プロンプトの例を取得します。<br/>`ショッピング` `e-bay` `ali-express` `クーポン` |
| [Savvy Trader AI](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **savvytrader** on **2024-06-27**</sup> | リアルタイムの株式、暗号通貨、その他の投資データ。<br/>`株式` `分析` |
| [ソーシャル検索](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **say-apps** on **2024-06-02**</sup> | ソーシャル検索は、ツイート、ユーザー、フォロワー、画像、メディアなどへのアクセスを提供します。<br/>`ソーシャル` `ツイッター` `x` `検索` |
| [スペース](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **automateyournetwork** on **2024-05-12**</sup> | NASAを含む宇宙データ。<br/>`宇宙` `nasa` |
> 📊 合計プラグイン数: [<kbd>**52**</kbd>](https://github.com/lobehub/lobe-chat-plugins)
<!-- PLUGIN LIST -->
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-agent]][docs-feat-agent]
### `7` [エージェントマーケット (GPTs)][docs-feat-agent]
LobeChatエージェントマーケットプレイスでは、クリエイターが多くの優れたエージェントを発見できる活気に満ちた革新的なコミュニティを提供しています。
これらのエージェントは、仕事のシナリオで重要な役割を果たすだけでなく、学習プロセスでも大いに便利です。
私たちのマーケットプレイスは、単なるショーケースプラットフォームではなく、協力の場でもあります。ここでは、誰もが自分の知恵を貢献し、開発したエージェントを共有できます。
> \[!TIP]
>
> [🤖/🏪 エージェントを提出][submit-agents-link]することで、簡単にエージェント作品をプラットフォームに提出できます。
> 重要なのは、LobeChatが高度な自動化国際化i18nワークフローを確立しており、
> あなたのエージェントを複数の言語バージョンにシームレスに翻訳できることです。
> これにより、ユーザーがどの言語を話していても、エージェントを障害なく体験できます。
> \[!IMPORTANT]
>
> すべてのユーザーがこの成長するエコシステムに参加し、エージェントの反復と最適化に参加することを歓迎します。
> 一緒に、より面白く、実用的で革新的なエージェントを作成し、エージェントの多様性と実用性をさらに豊かにしましょう。
<!-- AGENT LIST -->
| 最近の提出 | 説明 |
| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| [Cプログラム学習アシスタント](https://chat-preview.lobehub.com/market?agent=sichuan-university-941-c-programming-assistant)<br/><sup>By **[YBGuoYang](https://github.com/YBGuoYang)** on **2024-07-28**</sup> | Cプログラム設計の学習を支援します<br/>`941` |
| [ブランドパイオニア](https://chat-preview.lobehub.com/market?agent=brand-pioneer)<br/><sup>By **[SaintFresh](https://github.com/SaintFresh)** on **2024-07-25**</sup> | ブランド開発の専門家、思想リーダー、ブランド戦略のスーパー天才、ブランドビジョナリー。ブランドパイオニアは、革新の最前線の探検家であり、自分の分野の発明者です。市場を提供し、専門分野の画期的な進展を特徴とする未来の世界を想像させてください。<br/>`ビジネス` `ブランドパイオニア` `ブランド開発` `ビジネスアシスタント` `ブランドナラティブ` |
| [ネットワークセキュリティアシスタント](https://chat-preview.lobehub.com/market?agent=cybersecurity-copilot)<br/><sup>By **[huoji120](https://github.com/huoji120)** on **2024-07-23**</sup> | ログ、コード、逆コンパイルを分析し、問題を特定し、最適化の提案を提供するネットワークセキュリティの専門家アシスタント。<br/>`ネットワークセキュリティ` `トラフィック分析` `ログ分析` `コード逆コンパイル` `ctf` |
| [BIDOSx2](https://chat-preview.lobehub.com/market?agent=bidosx-2-v-2)<br/><sup>By **[SaintFresh](https://github.com/SaintFresh)** on **2024-07-21**</sup> | 従来のAIを超越する高度なAI LLM。'BIDOS'は、'ブランドのアイデア、開発、運営、スケーリング'と'ビジネスインテリジェンス決定最適化システム'の両方を意味します。<br/>`ブランド開発` `aiアシスタント` `市場分析` `戦略計画` `ビジネス最適化` `ビジネスインテリジェンス` |
> 📊 合計エージェント数: [<kbd>**307**</kbd> ](https://github.com/lobehub/lobe-chat-agents)
<!-- AGENT LIST -->
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-database]][docs-feat-database]
### `8` [ローカル / リモートデータベースのサポート][docs-feat-database]
LobeChatは、サーバーサイドデータベースとローカルデータベースの両方の使用をサポートしています。ニーズに応じて、適切なデプロイメントソリューションを選択できます
- **ローカルデータベース**データとプライバシー保護に対するより多くの制御を希望するユーザーに適しています。LobeChatはCRDTConflict-Free Replicated Data Type技術を使用してマルチデバイス同期を実現しています。これはシームレスなデータ同期体験を提供することを目的とした実験的な機能です。
- **サーバーサイドデータベース**より便利なユーザー体験を希望するユーザーに適しています。LobeChatはPostgreSQLをサーバーサイドデータベースとしてサポートしています。サーバーサイドデータベースの設定方法についての詳細なドキュメントは、[サーバーサイドデータベースの設定](https://lobehub.com/docs/self-hosting/advanced/server-database)をご覧ください。
どのデータベースを選択しても、LobeChatは優れたユーザー体験を提供します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-auth]][docs-feat-auth]
### `9` [マルチユーザ管理のサポート][docs-feat-auth]
LobeChatはマルチユーザ管理をサポートし、異なるニーズに応じて2つの主要なユーザ認証および管理ソリューションを提供します
- **next-auth**LobeChatは、複数の認証方法OAuth、メールログイン、資格情報ログインなどをサポートする柔軟で強力な認証ライブラリである`next-auth`を統合しています。`next-auth`を使用すると、ユーザの登録、ログイン、セッション管理、ソーシャルログインなどの機能を簡単に実装し、ユーザデータのセキュリティとプライバシーを確保できます。
- **Clerk**より高度なユーザ管理機能が必要なユーザ向けに、LobeChatは`Clerk`もサポートしています。`Clerk`は、現代的なユーザ管理プラットフォームであり、多要素認証MFA、ユーザプロファイル管理、ログイン活動の監視など、より豊富な機能を提供します。`Clerk`を使用すると、より高いセキュリティと柔軟性を得ることができ、複雑なユーザ管理ニーズに簡単に対応できます。
どのユーザ管理ソリューションを選択しても、LobeChatは優れたユーザー体験と強力な機能サポートを提供します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-pwa]][docs-feat-pwa]
### `10` [プログレッシブウェブアプリ (PWA)][docs-feat-pwa]
私たちは、今日のマルチデバイス環境でユーザーにシームレスな体験を提供することの重要性を深く理解しています。
そのため、プログレッシブウェブアプリケーション([PWA](https://support.google.com/chrome/answer/9658361))技術を採用しました。
これは、ウェブアプリケーションをネイティブアプリに近い体験に引き上げるモダンなウェブ技術です。
PWAを通じて、LobeChatはデスクトップとモバイルデバイスの両方で高度に最適化されたユーザー体験を提供しながら、その軽量で高性能な特性を維持します。
視覚的および感覚的には、インターフェースを慎重に設計し、ネイティブアプリと区別がつかないようにし、
スムーズなアニメーション、レスポンシブレイアウト、および異なるデバイスの画面解像度に適応するようにしています。
> \[!NOTE]
>
> PWAのインストールプロセスに慣れていない場合は、以下の手順に従ってLobeChatをデスクトップアプリケーションモバイルデバイスにも適用として追加できます
>
> - コンピュータでChromeまたはEdgeブラウザを起動します。
> - LobeChatのウェブページにアクセスします。
> - アドレスバーの右上にある<kbd>インストール</kbd>アイコンをクリックします。
> - 画面の指示に従ってPWAのインストールを完了します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-mobile]][docs-feat-mobile]
### `11` [モバイルデバイスの適応][docs-feat-mobile]
モバイルデバイスのユーザー体験を向上させるために、一連の最適化設計を行いました。現在、モバイルユーザー体験のバージョンを繰り返し改善しています。ご意見やアイデアがある場合は、GitHub IssuesやPull Requestsを通じてフィードバックをお寄せください。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
[![][image-feat-theme]][docs-feat-theme]
### `12` [カスタムテーマ][docs-feat-theme]
デザインエンジニアリング指向のアプリケーションとして、LobeChatはユーザーの個別体験を重視しており、
柔軟で多様なテーマモードを導入しています。日中のライトモードと夜間のダークモードを含みます。
テーマモードの切り替えに加えて、さまざまな色のカスタマイズオプションを提供し、ユーザーが自分の好みに応じてアプリケーションのテーマカラーを調整できるようにしています。
落ち着いたダークブルー、活気のあるピーチピンク、プロフェッショナルなグレーホワイトなど、LobeChatでは自分のスタイルに合った色の選択肢を見つけることができます。
> \[!TIP]
>
> デフォルトの設定は、ユーザーのシステムのカラーモードをインテリジェントに認識し、テーマを自動的に切り替えて、オペレーティングシステムと一貫した視覚体験を提供します。
> 詳細を手動で制御するのが好きなユーザーには、直感的な設定オプションと、会話シナリオに対してチャットバブルモードとドキュメントモードの選択肢を提供します。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
### `*` その他の特徴
これらの特徴に加えて、LobeChatは基本的な技術基盤も優れています
- [x] 💨 **迅速なデプロイ**VercelプラットフォームまたはDockerイメージを使用して、ワンクリックでデプロイを行い、1分以内にプロセスを完了できます。複雑な設定は不要です。
- [x] 🌐 **カスタムドメイン**:ユーザーが独自のドメインを持っている場合、プラットフォームにバインドして、どこからでも対話エージェントに迅速にアクセスできます。
- [x] 🔒 **プライバシー保護**:すべてのデータはユーザーのブラウザにローカルに保存され、ユーザーのプライバシーを保護します。
- [x] 💎 **洗練されたUIデザイン**慎重に設計されたインターフェースで、エレガントな外観とスムーズなインタラクションを提供します。ライトモードとダークモードをサポートし、モバイルフレンドリーです。PWAサポートにより、よりネイティブに近い体験を提供します。
- [x] 🗣️ **スムーズな会話体験**流れるような応答により、スムーズな会話体験を提供します。Markdownレンダリングを完全にサポートし、コードのハイライト、LaTexの数式、Mermaidのフローチャートなどを含みます。
> ✨ LobeChatの進化に伴い、さらに多くの機能が追加されます。
---
> \[!NOTE]
>
> 今後の[ロードマップ][github-project-link]計画は、Projectsセクションで確認できます。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## ⚡️ パフォーマンス
> \[!NOTE]
>
> 完全なレポートのリストは[📘 Lighthouseレポート][docs-lighthouse]で確認できます。
| デスクトップ | モバイル |
| :-----------------------------------------: | :----------------------------------------: |
| ![][chat-desktop] | ![][chat-mobile] |
| [📑 Lighthouseレポート][chat-desktop-report] | [📑 Lighthouseレポート][chat-mobile-report] |
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## 🛳 自己ホスティング
LobeChatは、Vercelと[Dockerイメージ][docker-release-link]を使用した自己ホスティングバージョンを提供しています。これにより、事前の知識がなくても数分で独自のチャットボットをデプロイできます。
> \[!TIP]
>
> [📘 独自のLobeChatを構築する][docs-self-hosting]について詳しくはこちらをご覧ください。
### `A` Vercel、Zeabur、Sealosでのデプロイ
このサービスをVercelまたはZeaburでデプロイしたい場合は、以下の手順に従ってください
- [OpenAI API Key](https://platform.openai.com/account/api-keys)を準備します。
- 下のボタンをクリックしてデプロイを開始しますGitHubアカウントで直接ログインし、環境変数セクションに`OPENAI_API_KEY`(必須)と`ACCESS_CODE`(推奨)を入力します。
- デプロイが完了したら、使用を開始できます。
- カスタムドメインをバインドオプションVercelが割り当てたドメインのDNSは一部の地域で汚染されているため、カスタムドメインをバインドすることで直接接続できます。
<div align="center">
| Vercelでデプロイ | Zeaburでデプロイ | Sealosでデプロイ |
| :-------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------: |
| [![][deploy-button-image]][deploy-link] | [![][deploy-on-zeabur-button-image]][deploy-on-zeabur-link] | [![][deploy-on-sealos-button-image]][deploy-on-sealos-link] |
</div>
#### フォーク後
フォーク後、リポジトリのアクションページで他のアクションを無効にし、アップストリーム同期アクションのみを保持します。
#### 更新を維持
READMEのワンクリックデプロイ手順に従って独自のプロジェクトをデプロイした場合、「更新が利用可能です」というプロンプトが常に表示されることがあります。これは、Vercelがデフォルトで新しいプロジェクトを作成し、フォークしないため、更新を正確に検出できないためです。
> \[!TIP]
>
> [📘 最新バージョンと自動同期][docs-upstream-sync]の手順に従って再デプロイすることをお勧めします。
<br/>
### `B` Dockerでのデプロイ
[![][docker-release-shield]][docker-release-link]
[![][docker-size-shield]][docker-size-link]
[![][docker-pulls-shield]][docker-pulls-link]
LobeChatサービスを独自のプライベートデバイスにデプロイするためのDockerイメージを提供しています。以下のコマンドを使用してLobeChatサービスを開始します
```fish
$ docker run -d -p 3210:3210 \
-e OPENAI_API_KEY=sk-xxxx \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
```
> \[!TIP]
>
> OpenAIサービスをプロキシ経由で使用する必要がある場合は、`OPENAI_PROXY_URL`環境変数を使用してプロキシアドレスを設定できます:
```fish
$ docker run -d -p 3210:3210 \
-e OPENAI_API_KEY=sk-xxxx \
-e OPENAI_PROXY_URL=https://api-proxy.com/v1 \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
```
> \[!NOTE]
>
> Dockerを使用したデプロイの詳細な手順については、[📘 Dockerデプロイガイド][docs-docker]を参照してください。
<br/>
### 環境変数
このプロジェクトは、環境変数で設定される追加の構成項目を提供します:
| 環境変数 | 必須 | 説明 | 例 |
| -------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------- |
| `OPENAI_API_KEY` | はい | これはOpenAIアカウントページで申請したAPIキーです | `sk-xxxxxx...xxxxxx` |
| `OPENAI_PROXY_URL` | いいえ | OpenAIインターフェイスプロキシを手動で設定する場合、この設定項目を使って、デフォルトのOpenAI APIリクエストベースURLを上書きすることができます。 | `https://api.chatanywhere.cn` または `https://aihubmix.com/v1` <br/>デフォルトの値は<br/>`https://api.openai.com/v1` |
| `ACCESS_CODE` | いいえ | このサービスにアクセスするためのパスワードを追加します。漏洩を避けるために長いパスワードを設定することができます。この値にカンマが含まれる場合は、パスワードの配列となります。 | `awCTe)re_r74` または `rtrt_ewee3@09!` または `code1,code2,code3` |
| `OPENAI_MODEL_LIST` | いいえ | モデルリストをコントロールするために使用します。モデルを追加するには `+` を、モデルを非表示にするには `-` を、モデルの表示名をカンマ区切りでカスタマイズするには `model_name=display_name` を使用します。 | `qwen-7b-chat,+glm-6b,-gpt-3.5-turbo` |
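As a concrete illustration, the model-list syntax from the table can be combined with the Docker command shown earlier (all values are examples only):

```fish
$ docker run -d -p 3210:3210 \
  -e OPENAI_API_KEY=sk-xxxx \
  -e OPENAI_MODEL_LIST="qwen-7b-chat,+glm-6b,-gpt-3.5-turbo" \
  -e ACCESS_CODE=lobe66 \
  --name lobe-chat \
  lobehub/lobe-chat
```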
> \[!NOTE]
>
> 環境変数の完全なリストは [📘環境変数][docs-env-var] にあります
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## 📦 エコシステム
| NPM | リポジトリ | 説明 | バージョン |
| --------------------------------- | --------------------------------------- | ----------------------------------------------------------------------------------------------------- | ----------------------------------------- |
| [@lobehub/ui][lobe-ui-link] | [lobehub/lobe-ui][lobe-ui-github] | AIGC ウェブアプリケーション構築専用のオープンソースUIコンポーネントライブラリ。 | [![][lobe-ui-shield]][lobe-ui-link] |
| [@lobehub/icons][lobe-icons-link] | [lobehub/lobe-icons][lobe-icons-github] | 人気の AI/LLM モデルブランドの SVG ロゴとアイコン集。 | [![][lobe-icons-shield]][lobe-icons-link] |
| [@lobehub/tts][lobe-tts-link] | [lobehub/lobe-tts][lobe-tts-github] | 高品質で信頼性の高い TTS/STT React Hooks ライブラリ | [![][lobe-tts-shield]][lobe-tts-link] |
| [@lobehub/lint][lobe-lint-link] | [lobehub/lobe-lint][lobe-lint-github] | LobeHub の ESlint、Stylelint、Commitlint、Prettier、Remark、Semantic Release の設定。 | [![][lobe-lint-shield]][lobe-lint-link] |
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## 🧩 プラグイン
プラグインは、LobeChatの[関数呼び出し][docs-functionc-call]機能を拡張する手段を提供します。プラグインを使用して、新しい関数呼び出しやメッセージ結果の新しいレンダリング方法を導入することができます。プラグイン開発に興味がある方は、Wikiの[📘プラグイン開発ガイド][docs-plugin-dev]を参照してください。
- [lobe-chat-plugins][lobe-chat-plugins]: これはLobeChatのプラグインインデックスです。このリポジトリからindex.jsonにアクセスし、LobeChatで利用可能なプラグインのリストをユーザに表示します。
- [chat-plugin-template][chat-plugin-template]: これはLobeChatプラグイン開発用のプラグインテンプレートです。
- [@lobehub/chat-plugin-sdk][chat-plugin-sdk]: LobeChatプラグインSDKは、Lobe Chat用の優れたチャットプラグインの作成を支援します。
- [@lobehub/chat-plugins-gateway][chat-plugins-gateway]: LobeChat Plugins Gatewayは、LobeChatプラグインのためのゲートウェイを提供するバックエンドサービスです。このサービスはVercelを使用してデプロイされます。プライマリAPIのPOST /api/v1/runnerはEdge Functionとしてデプロイされます。
> \[!NOTE]
>
> プラグインシステムは現在大規模な開発中です。詳しくは以下の issue をご覧ください:
>
> - [x] [**プラグインフェイズ 1**](https://github.com/lobehub/lobe-chat/issues/73): プラグインを本体から分離し、メンテナンスのためにプラグインを独立したリポジトリに分割し、プラグインの動的ロードを実現する。
> - [x] [**プラグインフェイズ 2**](https://github.com/lobehub/lobe-chat/issues/97): プラグイン使用の安全性と安定性、より正確な異常状態の提示、プラグインアーキテクチャの保守性、開発者フレンドリー。
> - [x] [**プラグインフェイズ 3**](https://github.com/lobehub/lobe-chat/issues/149): より高度で包括的なカスタマイズ機能、プラグイン認証のサポート、サンプル。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## ⌨️ ローカル開発
GitHub Codespaces を使ってオンライン開発ができます:
[![][codespaces-shield]][codespaces-link]
Or clone it for local development:
```fish
$ git clone https://github.com/lobehub/lobe-chat.git
$ cd lobe-chat
$ pnpm install
$ pnpm dev
```
より詳しい情報をお知りになりたい方は、[📘開発ガイド][docs-dev-guide]をご覧ください。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## 🤝 コントリビュート
どのようなタイプのコントリビュートも大歓迎です;コードを提供することに興味がある方は、GitHub の [Issues][github-issues-link] や [Projects][github-project-link] をチェックして、あなたの力をお貸しください。
> \[!TIP]
>
> 私たちは技術主導のフォーラムを創設し、知識の交流とアイデアの交換を促進することで、相互のインスピレーションと協力的なイノベーションを生み出すことを目指しています。
>
> LobeChat の改善にご協力ください。製品設計のフィードバックやユーザー体験に関するディスカッションを直接お寄せください。
>
> **プリンシパルメンテナー:** [@arvinxx](https://github.com/arvinxx) [@canisminor1990](https://github.com/canisminor1990)
[![][pr-welcome-shield]][pr-welcome-link]
[![][submit-agents-shield]][submit-agents-link]
[![][submit-plugin-shield]][submit-plugin-link]
<a href="https://github.com/lobehub/lobe-chat/graphs/contributors" target="_blank">
<table>
<tr>
<th colspan="2">
<br><img src="https://contrib.rocks/image?repo=lobehub/lobe-chat"><br><br>
</th>
</tr>
<tr>
<td>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://next.ossinsight.io/widgets/official/compose-org-active-contributors/thumbnail.png?activity=active&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=2x3&color_scheme=dark">
<img src="https://next.ossinsight.io/widgets/official/compose-org-active-contributors/thumbnail.png?activity=active&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=2x3&color_scheme=light">
</picture>
</td>
<td rowspan="2">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://next.ossinsight.io/widgets/official/compose-org-participants-growth/thumbnail.png?activity=active&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=4x7&color_scheme=dark">
<img src="https://next.ossinsight.io/widgets/official/compose-org-participants-growth/thumbnail.png?activity=active&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=4x7&color_scheme=light">
</picture>
</td>
</tr>
<tr>
<td>
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://next.ossinsight.io/widgets/official/compose-org-active-contributors/thumbnail.png?activity=new&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=2x3&color_scheme=dark">
<img src="https://next.ossinsight.io/widgets/official/compose-org-active-contributors/thumbnail.png?activity=new&period=past_28_days&owner_id=131470832&repo_ids=643445235&image_size=2x3&color_scheme=light">
</picture>
</td>
</tr>
</table>
</a>
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## ❤️ スポンサー
あなたの一度きりの寄付が、私たちの銀河系で輝きを放ちます!皆様は流れ星であり、私たちの旅路に迅速かつ明るい影響を与えます。私たちを信じてくださり、ありがとうございます。皆様の寛大なお気持ちが、私たちの使命に向かって、一度に輝かしい閃光を放つよう導いてくださるのです。
<a href="https://opencollective.com/lobehub" target="_blank">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/lobehub/.github/blob/main/static/sponsor-dark.png?raw=true">
<img src="https://github.com/lobehub/.github/blob/main/static/sponsor-light.png?raw=true">
</picture>
</a>
<div align="right">
[![][back-to-top]](#readme-top)
</div>
## 🔗 その他の製品
- **[🅰️ Lobe SD Theme][lobe-theme]:** Stable Diffusion WebUI のためのモダンなテーマ、絶妙なインターフェースデザイン、高度にカスタマイズ可能なUI、効率を高める機能。
- **[⛵️ Lobe Midjourney WebUI][lobe-midjourney-webui]:** Midjourney の WebUI は、AI を活用しテキストプロンプトから豊富で多様な画像を素早く生成し、創造性を刺激して会話を盛り上げます。
- **[🌏 Lobe i18n][lobe-i18n] :** Lobe i18n は ChatGPT を利用した国際化翻訳プロセスの自動化ツールです。大きなファイルの自動分割、増分更新、OpenAIモデル、APIプロキシ、温度のカスタマイズオプションなどの機能をサポートしています。
- **[💌 Lobe Commit][lobe-commit]:** Lobe Commit は、Langchain/ChatGPT を活用して Gitmoji ベースのコミットメッセージを生成する CLI ツールです。
<div align="right">
[![][back-to-top]](#readme-top)
</div>
---
<details><summary><h4>📝 License</h4></summary>
[![][fossa-license-shield]][fossa-license-link]
</details>
Copyright © 2024 [LobeHub][profile-link]. <br />
This project is [Apache 2.0](./LICENSE) licensed.
<!-- LINK GROUP -->
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square
[blog]: https://lobehub.com/blog
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/chat_preview_lobehub_com_chat.html
[chat-plugin-sdk]: https://github.com/lobehub/chat-plugin-sdk
[chat-plugin-template]: https://github.com/lobehub/chat-plugin-template
[chat-plugins-gateway]: https://github.com/lobehub/chat-plugins-gateway
[codecov-link]: https://codecov.io/gh/lobehub/lobe-chat
[codecov-shield]: https://img.shields.io/codecov/c/github/lobehub/lobe-chat?labelColor=black&style=flat-square&logo=codecov&logoColor=white
[codespaces-link]: https://codespaces.new/lobehub/lobe-chat
[codespaces-shield]: https://github.com/codespaces/badge.svg
[deploy-button-image]: https://vercel.com/button
[deploy-link]: https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat&env=OPENAI_API_KEY,ACCESS_CODE&envDescription=Find%20your%20OpenAI%20API%20Key%20by%20click%20the%20right%20Learn%20More%20button.%20%7C%20Access%20Code%20can%20protect%20your%20website&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=lobe-chat&repository-name=lobe-chat
[deploy-on-sealos-button-image]: https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg
[deploy-on-sealos-link]: https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dlobe-chat
[deploy-on-zeabur-button-image]: https://zeabur.com/button.svg
[deploy-on-zeabur-link]: https://zeabur.com/templates/VZGGTI
[discord-link]: https://discord.gg/AYFPHvv2jT
[discord-shield]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=flat-square
[discord-shield-badge]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=for-the-badge
[docker-pulls-link]: https://hub.docker.com/r/lobehub/lobe-chat
[docker-pulls-shield]: https://img.shields.io/docker/pulls/lobehub/lobe-chat?color=45cc11&labelColor=black&style=flat-square
[docker-release-link]: https://hub.docker.com/r/lobehub/lobe-chat
[docker-release-shield]: https://img.shields.io/docker/v/lobehub/lobe-chat?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square
[docker-size-link]: https://hub.docker.com/r/lobehub/lobe-chat
[docker-size-shield]: https://img.shields.io/docker/image-size/lobehub/lobe-chat?color=369eff&labelColor=black&style=flat-square
[docs]: https://lobehub.com/docs/usage/start
[docs-dev-guide]: https://github.com/lobehub/lobe-chat/wiki/index
[docs-docker]: https://lobehub.com/docs/self-hosting/platform/docker
[docs-env-var]: https://lobehub.com/docs/self-hosting/environment-variables
[docs-feat-agent]: https://lobehub.com/docs/usage/features/agent-market
[docs-feat-auth]: https://lobehub.com/docs/usage/features/auth
[docs-feat-database]: https://lobehub.com/docs/usage/features/database
[docs-feat-local]: https://lobehub.com/docs/usage/features/local-llm
[docs-feat-mobile]: https://lobehub.com/docs/usage/features/mobile
[docs-feat-plugin]: https://lobehub.com/docs/usage/features/plugin-system
[docs-feat-provider]: https://lobehub.com/docs/usage/features/multi-ai-providers
[docs-feat-pwa]: https://lobehub.com/docs/usage/features/pwa
[docs-feat-t2i]: https://lobehub.com/docs/usage/features/text-to-image
[docs-feat-theme]: https://lobehub.com/docs/usage/features/theme
[docs-feat-tts]: https://lobehub.com/docs/usage/features/tts
[docs-feat-vision]: https://lobehub.com/docs/usage/features/vision
[docs-functionc-call]: https://lobehub.com/blog/openai-function-call
[docs-lighthouse]: https://github.com/lobehub/lobe-chat/wiki/Lighthouse
[docs-plugin-dev]: https://lobehub.com/docs/usage/plugins/development
[docs-self-hosting]: https://lobehub.com/docs/self-hosting/start
[docs-upstream-sync]: https://lobehub.com/docs/self-hosting/advanced/upstream-sync
[docs-usage-ollama]: https://lobehub.com/docs/usage/providers/ollama
[docs-usage-plugin]: https://lobehub.com/docs/usage/plugins/basic
[fossa-license-link]: https://app.fossa.com/projects/git%2Bgithub.com%2Flobehub%2Flobe-chat
[fossa-license-shield]: https://app.fossa.com/api/projects/git%2Bgithub.com%2Flobehub%2Flobe-chat.svg?type=large
[github-action-release-link]: https://github.com/actions/workflows/lobehub/lobe-chat/release.yml
[github-action-release-shield]: https://img.shields.io/github/actions/workflow/status/lobehub/lobe-chat/release.yml?label=release&labelColor=black&logo=githubactions&logoColor=white&style=flat-square
[github-action-test-link]: https://github.com/actions/workflows/lobehub/lobe-chat/test.yml
[github-action-test-shield]: https://img.shields.io/github/actions/workflow/status/lobehub/lobe-chat/test.yml?label=test&labelColor=black&logo=githubactions&logoColor=white&style=flat-square
[github-contributors-link]: https://github.com/lobehub/lobe-chat/graphs/contributors
[github-contributors-shield]: https://img.shields.io/github/contributors/lobehub/lobe-chat?color=c4f042&labelColor=black&style=flat-square
[github-forks-link]: https://github.com/lobehub/lobe-chat/network/members
[github-forks-shield]: https://img.shields.io/github/forks/lobehub/lobe-chat?color=8ae8ff&labelColor=black&style=flat-square
[github-issues-link]: https://github.com/lobehub/lobe-chat/issues
[github-issues-shield]: https://img.shields.io/github/issues/lobehub/lobe-chat?color=ff80eb&labelColor=black&style=flat-square
[github-license-link]: https://github.com/lobehub/lobe-chat/blob/main/LICENSE
[github-license-shield]: https://img.shields.io/badge/license-apache%202.0-white?labelColor=black&style=flat-square
[github-project-link]: https://github.com/lobehub/lobe-chat/projects
[github-release-link]: https://github.com/lobehub/lobe-chat/releases
[github-release-shield]: https://img.shields.io/github/v/release/lobehub/lobe-chat?color=369eff&labelColor=black&logo=github&style=flat-square
[github-releasedate-link]: https://github.com/lobehub/lobe-chat/releases
[github-releasedate-shield]: https://img.shields.io/github/release-date/lobehub/lobe-chat?labelColor=black&style=flat-square
[github-stars-link]: https://github.com/lobehub/lobe-chat/network/stargazers
[github-stars-shield]: https://img.shields.io/github/stars/lobehub/lobe-chat?color=ffcb47&labelColor=black&style=flat-square
[github-trending-shield]: https://trendshift.io/api/badge/repositories/2256
[github-trending-url]: https://trendshift.io/repositories/2256
[image-banner]: https://github.com/lobehub/lobe-chat/assets/28616219/9f155dff-4737-429f-9cad-a70a1a860c5f
[image-feat-agent]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670869-f1ffbf66-42b6-42cf-a937-9ce1f8328514.png
[image-feat-auth]: https://github.com/lobehub/lobe-chat/assets/17870709/8ce70e15-40df-451e-b700-66090fe5b8c2
[image-feat-database]: https://github.com/lobehub/lobe-chat/assets/17870709/c27a0234-a4e9-40e5-8bcb-42d5ce7e40f9
[image-feat-local]: https://github.com/lobehub/lobe-chat/assets/28616219/ca9a21bc-ea6c-4c90-bf4a-fa53b4fb2b5c
[image-feat-mobile]: https://gw.alipayobjects.com/zos/kitchen/R441AuFS4W/mobile.webp
[image-feat-plugin]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670883-33c43a5c-a512-467e-855c-fa299548cce5.png
[image-feat-privoder]: https://github.com/lobehub/lobe-chat/assets/28616219/b164bc54-8ba2-4c1e-b2f2-f4d7f7e7a551
[image-feat-pwa]: https://gw.alipayobjects.com/zos/kitchen/69x6bllkX3/pwa.webp
[image-feat-t2i]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/297746445-0ff762b9-aa08-4337-afb7-12f932b6efbb.png
[image-feat-theme]: https://gw.alipayobjects.com/zos/kitchen/pvus1lo%26Z7/darkmode.webp
[image-feat-tts]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072124-c9853d8d-f1b5-44a8-a305-45ebc0f6d19a.png
[image-feat-vision]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072129-382bdf30-e3d6-4411-b5a0-249710b8ba08.png
[image-overview]: https://github.com/lobehub/lobe-chat/assets/17870709/56b95d48-f573-41cd-8b38-387bf88bc4bf
[image-star]: https://github.com/lobehub/lobe-chat/assets/17870709/cb06b748-513f-47c2-8740-d876858d7855
[issues-link]: https://img.shields.io/github/issues/lobehub/lobe-chat.svg?style=flat
[lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins
[lobe-commit]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-commit
[lobe-i18n]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-i18n
[lobe-icons-github]: https://github.com/lobehub/lobe-icons
[lobe-icons-link]: https://www.npmjs.com/package/@lobehub/icons
[lobe-icons-shield]: https://img.shields.io/npm/v/@lobehub/icons?color=369eff&labelColor=black&logo=npm&logoColor=white&style=flat-square
[lobe-lint-github]: https://github.com/lobehub/lobe-lint
[lobe-lint-link]: https://www.npmjs.com/package/@lobehub/lint
[lobe-lint-shield]: https://img.shields.io/npm/v/@lobehub/lint?color=369eff&labelColor=black&logo=npm&logoColor=white&style=flat-square
[lobe-midjourney-webui]: https://github.com/lobehub/lobe-midjourney-webui
[lobe-theme]: https://github.com/lobehub/sd-webui-lobe-theme
[lobe-tts-github]: https://github.com/lobehub/lobe-tts
[lobe-tts-link]: https://www.npmjs.com/package/@lobehub/tts
[lobe-tts-shield]: https://img.shields.io/npm/v/@lobehub/tts?color=369eff&labelColor=black&logo=npm&logoColor=white&style=flat-square
[lobe-ui-github]: https://github.com/lobehub/lobe-ui
[lobe-ui-link]: https://www.npmjs.com/package/@lobehub/ui
[lobe-ui-shield]: https://img.shields.io/npm/v/@lobehub/ui?color=369eff&labelColor=black&logo=npm&logoColor=white&style=flat-square
[official-site]: https://lobehub.com
[pr-welcome-link]: https://github.com/lobehub/lobe-chat/pulls
[pr-welcome-shield]: https://img.shields.io/badge/🤯_pr_welcome-%E2%86%92-ffcb47?labelColor=black&style=for-the-badge
[profile-link]: https://github.com/lobehub
[share-linkedin-link]: https://linkedin.com/feed
[share-linkedin-shield]: https://img.shields.io/badge/-share%20on%20linkedin-black?labelColor=black&logo=linkedin&logoColor=white&style=flat-square
[share-mastodon-link]: https://mastodon.social/share?text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source,%20extensible%20(Function%20Calling),%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT/LLM%20web%20application.%20https://github.com/lobehub/lobe-chat%20#chatbot%20#chatGPT%20#openAI
[share-mastodon-shield]: https://img.shields.io/badge/-share%20on%20mastodon-black?labelColor=black&logo=mastodon&logoColor=white&style=flat-square
[share-reddit-link]: https://www.reddit.com/submit?title=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20%23chatbot%20%23chatGPT%20%23openAI&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat
[share-reddit-shield]: https://img.shields.io/badge/-share%20on%20reddit-black?labelColor=black&logo=reddit&logoColor=white&style=flat-square
[share-telegram-link]: https://t.me/share/url"?text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20%23chatbot%20%23chatGPT%20%23openAI&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat
[share-telegram-shield]: https://img.shields.io/badge/-share%20on%20telegram-black?labelColor=black&logo=telegram&logoColor=white&style=flat-square
[share-weibo-link]: http://service.weibo.com/share/share.php?sharesource=weibo&title=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20%23chatbot%20%23chatGPT%20%23openAI&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat
[share-weibo-shield]: https://img.shields.io/badge/-share%20on%20weibo-black?labelColor=black&logo=sinaweibo&logoColor=white&style=flat-square
[share-whatsapp-link]: https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat%20%23chatbot%20%23chatGPT%20%23openAI
[share-whatsapp-shield]: https://img.shields.io/badge/-share%20on%20whatsapp-black?labelColor=black&logo=whatsapp&logoColor=white&style=flat-square
[share-x-link]: https://x.com/intent/tweet?hashtags=chatbot%2CchatGPT%2CopenAI&text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat
[share-x-shield]: https://img.shields.io/badge/-share%20on%20x-black?labelColor=black&logo=x&logoColor=white&style=flat-square
[sponsor-link]: https://opencollective.com/lobehub 'Become ❤️ LobeHub Sponsor'
[sponsor-shield]: https://img.shields.io/badge/-Sponsor%20LobeHub-f04f88?logo=opencollective&logoColor=white&style=flat-square
[submit-agents-link]: https://github.com/lobehub/lobe-chat-agents
[submit-agents-shield]: https://img.shields.io/badge/🤖/🏪_submit_agent-%E2%86%92-c4f042?labelColor=black&style=for-the-badge
[submit-plugin-link]: https://github.com/lobehub/lobe-chat-plugins
[submit-plugin-shield]: https://img.shields.io/badge/🧩/🏪_submit_plugin-%E2%86%92-95f3d9?labelColor=black&style=for-the-badge
[vercel-link]: https://chat-preview.lobehub.com
[vercel-shield]: https://img.shields.io/badge/vercel-online-55b467?labelColor=black&logo=vercel&style=flat-square
[vercel-shield-badge]: https://img.shields.io/badge/TRY%20LOBECHAT-ONLINE-55b467?labelColor=black&logo=vercel&style=for-the-badge

@ -8,7 +8,7 @@ An open-source, modern-design ChatGPT/LLMs UI/Framework.<br/>
Supports speech-synthesis, multi-modal, and extensible ([function call][docs-functionc-call]) plugin system.<br/> Supports speech-synthesis, multi-modal, and extensible ([function call][docs-functionc-call]) plugin system.<br/>
One-click **FREE** deployment of your private OpenAI ChatGPT/Claude/Gemini/Groq/Ollama chat application. One-click **FREE** deployment of your private OpenAI ChatGPT/Claude/Gemini/Groq/Ollama chat application.
**English** · [简体中文](./README.zh-CN.md) · [日本語](./README.ja-JP.md) · [Official Site][official-site] · [Changelog](./CHANGELOG.md) · [Documents][docs] · [Blog][blog] · [Feedback][github-issues-link] **English** · [简体中文](./README.zh-CN.md) · [Official Site][official-site] · [Changelog][changelog] · [Documents][docs] · [Blog][blog] · [Feedback][github-issues-link]
<!-- SHIELD GROUP --> <!-- SHIELD GROUP -->
@ -41,7 +41,7 @@ One-click **FREE** deployment of your private OpenAI ChatGPT/Claude/Gemini/Groq/
[![][github-trending-shield]][github-trending-url] [![][github-trending-shield]][github-trending-url]
[![][image-overview]][vercel-link] ![][image-overview]
</div> </div>
@ -52,23 +52,26 @@ One-click **FREE** deployment of your private OpenAI ChatGPT/Claude/Gemini/Groq/
- [👋🏻 Getting Started & Join Our Community](#-getting-started--join-our-community) - [👋🏻 Getting Started & Join Our Community](#-getting-started--join-our-community)
- [✨ Features](#-features) - [✨ Features](#-features)
- [`1` File Upload/Knowledge Base](#1-file-uploadknowledge-base) - [`1` Chain of Thought](#1-chain-of-thought)
- [`2` Multi-Model Service Provider Support](#2-multi-model-service-provider-support) - [`2` Branching Conversations](#2-branching-conversations)
- [`3` Local Large Language Model (LLM) Support](#3-local-large-language-model-llm-support) - [`3` Artifacts Support](#3-artifacts-support)
- [`4` Model Visual Recognition](#4-model-visual-recognition) - [`4` File Upload /Knowledge Base](#4-file-upload-knowledge-base)
- [`5` TTS & STT Voice Conversation](#5-tts--stt-voice-conversation) - [`5` Multi-Model Service Provider Support](#5-multi-model-service-provider-support)
- [`6` Text to Image Generation](#6-text-to-image-generation) - [`6` Local Large Language Model (LLM) Support](#6-local-large-language-model-llm-support)
- [`7` Plugin System (Function Calling)](#7-plugin-system-function-calling) - [`7` Model Visual Recognition](#7-model-visual-recognition)
- [`8` Agent Market (GPTs)](#8-agent-market-gpts) - [`8` TTS & STT Voice Conversation](#8-tts--stt-voice-conversation)
- [`9` Support Local / Remote Database](#9-support-local--remote-database) - [`9` Text to Image Generation](#9-text-to-image-generation)
- [`10` Support Multi-User Management](#10-support-multi-user-management) - [`10` Plugin System (Function Calling)](#10-plugin-system-function-calling)
- [`11` Progressive Web App (PWA)](#11-progressive-web-app-pwa) - [`11` Agent Market (GPTs)](#11-agent-market-gpts)
- [`12` Mobile Device Adaptation](#12-mobile-device-adaptation) - [`12` Support Local / Remote Database](#12-support-local--remote-database)
- [`13` Custom Themes](#13-custom-themes) - [`13` Support Multi-User Management](#13-support-multi-user-management)
- [`14` Progressive Web App (PWA)](#14-progressive-web-app-pwa)
- [`15` Mobile Device Adaptation](#15-mobile-device-adaptation)
- [`16` Custom Themes](#16-custom-themes)
- [`*` What's more](#-whats-more) - [`*` What's more](#-whats-more)
- [⚡️ Performance](#-performance) - [⚡️ Performance](#-performance)
- [🛳 Self Hosting](#-self-hosting) - [🛳 Self Hosting](#-self-hosting)
- [`A` Deploying with Vercel, Zeabur or Sealos](#a-deploying-with-vercel-zeabur-or-sealos) - [`A` Deploying with Vercel, Zeabur, Sealos or Alibaba Cloud](#a-deploying-with-vercel-zeabur-sealos-or-alibaba-cloud)
- [`B` Deploying with Docker](#b-deploying-with-docker) - [`B` Deploying with Docker](#b-deploying-with-docker)
- [Environment Variable](#environment-variable) - [Environment Variable](#environment-variable)
- [📦 Ecosystem](#-ecosystem) - [📦 Ecosystem](#-ecosystem)
@ -111,9 +114,48 @@ Whether for users or professional developers, LobeHub will be your AI Agent play
## ✨ Features ## ✨ Features
[![][image-feat-cot]][docs-feat-cot]
### `1` [Chain of Thought][docs-feat-cot]
Experience AI reasoning like never before. Watch as complex problems unfold step by step through our innovative Chain of Thought (CoT) visualization. This breakthrough feature provides unprecedented transparency into AI's decision-making process, allowing you to observe how conclusions are reached in real-time.
By breaking down complex reasoning into clear, logical steps, you can better understand and validate the AI's problem-solving approach. Whether you're debugging, learning, or simply curious about AI reasoning, CoT visualization transforms abstract thinking into an engaging, interactive experience.
[![][back-to-top]](#readme-top)
[![][image-feat-branch]][docs-feat-branch]
### `2` [Branching Conversations][docs-feat-branch]
Introducing a more natural and flexible way to chat with AI. With Branch Conversations, your discussions can flow in multiple directions, just like human conversations do. Create new conversation branches from any message, giving you the freedom to explore different paths while preserving the original context.
Choose between two powerful modes:
- **Continuation Mode:** Seamlessly extend your current discussion while maintaining valuable context
- **Standalone Mode:** Start fresh with a new topic based on any previous message
This groundbreaking feature transforms linear conversations into dynamic, tree-like structures, enabling deeper exploration of ideas and more productive interactions.
[![][back-to-top]](#readme-top)
[![][image-feat-artifacts]][docs-feat-artifacts]
### `3` [Artifacts Support][docs-feat-artifacts]
Experience the power of Claude Artifacts, now integrated into LobeChat. This revolutionary feature expands the boundaries of AI-human interaction, enabling real-time creation and visualization of diverse content formats.
Create and visualize with unprecedented flexibility:
- Generate and display dynamic SVG graphics
- Build and render interactive HTML pages in real-time
- Produce professional documents in multiple formats
[![][back-to-top]](#readme-top)
[![][image-feat-knowledgebase]][docs-feat-knowledgebase] [![][image-feat-knowledgebase]][docs-feat-knowledgebase]
### `1` [File Upload/Knowledge Base][docs-feat-knowledgebase] ### `4` [File Upload /Knowledge Base][docs-feat-knowledgebase]
LobeChat supports file upload and knowledge base functionality. You can upload various types of files including documents, images, audio, and video, as well as create knowledge bases, making it convenient for users to manage and search for files. Additionally, you can utilize files and knowledge base features during conversations, enabling a richer dialogue experience. LobeChat supports file upload and knowledge base functionality. You can upload various types of files including documents, images, audio, and video, as well as create knowledge bases, making it convenient for users to manage and search for files. Additionally, you can utilize files and knowledge base features during conversations, enabling a richer dialogue experience.
@ -131,7 +173,7 @@ LobeChat supports file upload and knowledge base functionality. You can upload v
[![][image-feat-privoder]][docs-feat-provider] [![][image-feat-privoder]][docs-feat-provider]
### `2` [Multi-Model Service Provider Support][docs-feat-provider] ### `5` [Multi-Model Service Provider Support][docs-feat-provider]
In the continuous development of LobeChat, we deeply understand the importance of diversity in model service providers for meeting the needs of the community when providing AI conversation services. Therefore, we have expanded our support to multiple model service providers, rather than being limited to a single one, in order to offer users a more diverse and rich selection of conversations. In the continuous development of LobeChat, we deeply understand the importance of diversity in model service providers for meeting the needs of the community when providing AI conversation services. Therefore, we have expanded our support to multiple model service providers, rather than being limited to a single one, in order to offer users a more diverse and rich selection of conversations.
@ -141,21 +183,56 @@ In this way, LobeChat can more flexibly adapt to the needs of different users, w
We have implemented support for the following model service providers: We have implemented support for the following model service providers:
- **AWS Bedrock**: Integrated with AWS Bedrock service, supporting models such as **Claude / LLama2**, providing powerful natural language processing capabilities. [Learn more](https://aws.amazon.com/cn/bedrock) <!-- PROVIDER LIST -->
- **Anthropic (Claude)**: Accessed Anthropic's **Claude** series models, including Claude 3 and Claude 2, with breakthroughs in multi-modal capabilities and extended context, setting a new industry benchmark. [Learn more](https://www.anthropic.com/claude)
- **Google AI (Gemini Pro, Gemini Vision)**: Access to Google's **Gemini** series models, including Gemini and Gemini Pro, to support advanced language understanding and generation. [Learn more](https://deepmind.google/technologies/gemini/) - **[OpenAI](https://lobechat.com/discover/provider/openai)**: OpenAI is a global leader in artificial intelligence research, with models like the GPT series pushing the frontiers of natural language processing. OpenAI is committed to transforming multiple industries through innovative and efficient AI solutions. Their products demonstrate significant performance and cost-effectiveness, widely used in research, business, and innovative applications.
- **Groq**: Accessed Groq's AI models, efficiently processing message sequences and generating responses, capable of multi-turn dialogues and single-interaction tasks. [Learn more](https://groq.com/) - **[Ollama](https://lobechat.com/discover/provider/ollama)**: Ollama provides models that cover a wide range of fields, including code generation, mathematical operations, multilingual processing, and conversational interaction, catering to diverse enterprise-level and localized deployment needs.
- **OpenRouter**: Supports routing of models including **Claude 3**, **Gemma**, **Mistral**, **Llama2** and **Cohere**, with intelligent routing optimization to improve usage efficiency, open and flexible. [Learn more](https://openrouter.ai/) - **[Anthropic](https://lobechat.com/discover/provider/anthropic)**: Anthropic is a company focused on AI research and development, offering a range of advanced language models such as Claude 3.5 Sonnet, Claude 3 Sonnet, Claude 3 Opus, and Claude 3 Haiku. These models achieve an ideal balance between intelligence, speed, and cost, suitable for various applications from enterprise workloads to rapid-response scenarios. Claude 3.5 Sonnet, as their latest model, has excelled in multiple evaluations while maintaining a high cost-performance ratio.
- **01.AI (Yi Model)**: Integrated the 01.AI models, with series of APIs featuring fast inference speed, which not only shortened the processing time, but also maintained excellent model performance. [Learn more](https://01.ai/) - **[Bedrock](https://lobechat.com/discover/provider/bedrock)**: Bedrock is a service provided by Amazon AWS, focusing on delivering advanced AI language and visual models for enterprises. Its model family includes Anthropic's Claude series, Meta's Llama 3.1 series, and more, offering a range of options from lightweight to high-performance, supporting tasks such as text generation, conversation, and image processing for businesses of varying scales and needs.
- **Together.ai**: Over 100 leading open-source Chat, Language, Image, Code, and Embedding models are available through the Together Inference API. For these models you pay just for what you use. [Learn more](https://www.together.ai/) - **[Google](https://lobechat.com/discover/provider/google)**: Google's Gemini series represents its most advanced, versatile AI models, developed by Google DeepMind, designed for multimodal capabilities, supporting seamless understanding and processing of text, code, images, audio, and video. Suitable for various environments from data centers to mobile devices, it significantly enhances the efficiency and applicability of AI models.
- **ChatGLM**: Added the **ChatGLM** series models from Zhipuai (GLM-4/GLM-4-vision/GLM-3-turbo), providing users with another efficient conversation model choice. [Learn more](https://www.zhipuai.cn/) - **[DeepSeek](https://lobechat.com/discover/provider/deepseek)**: DeepSeek is a company focused on AI technology research and application, with its latest model DeepSeek-V2.5 integrating general dialogue and code processing capabilities, achieving significant improvements in human preference alignment, writing tasks, and instruction following.
- **Moonshot AI (Dark Side of the Moon)**: Integrated with the Moonshot series models, an innovative AI startup from China, aiming to provide deeper conversation understanding. [Learn more](https://www.moonshot.cn/) - **[HuggingFace](https://lobechat.com/discover/provider/huggingface)**: The HuggingFace Inference API provides a fast and free way for you to explore thousands of models for various tasks. Whether you are prototyping for a new application or experimenting with the capabilities of machine learning, this API gives you instant access to high-performance models across multiple domains.
- **Minimax**: Integrated the Minimax models, including the MoE model **abab6**, offers a broader range of choices. [Learn more](https://www.minimaxi.com/) - **[OpenRouter](https://lobechat.com/discover/provider/openrouter)**: OpenRouter is a service platform providing access to various cutting-edge large model interfaces, supporting OpenAI, Anthropic, LLaMA, and more, suitable for diverse development and application needs. Users can flexibly choose the optimal model and pricing based on their requirements, enhancing the AI experience.
- **DeepSeek**: Integrated with the DeepSeek series models, an innovative AI startup from China. The product has been designed to provide a model that balances performance with price. [Learn more](https://www.deepseek.com/) - **[Cloudflare Workers AI](https://lobechat.com/discover/provider/cloudflare)**: Run serverless GPU-powered machine learning models on Cloudflare's global network.
- **Qwen**: Integrated the Qwen series models, including the latest **qwen-turbo**, **qwen-plus** and **qwen-max**. [Learn more](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction) - **[GitHub](https://lobechat.com/discover/provider/github)**: With GitHub Models, developers can become AI engineers and leverage the industry's leading AI models.
- **Novita AI**: Access **Llama**, **Mistral**, and other leading open-source models at cheapest prices. Engage in uncensored role-play, spark creative discussions, and foster unrestricted innovation. **Pay For What You Use.** [Learn more](https://novita.ai/llm-api?utm_source=lobechat&utm_medium=ch&utm_campaign=api)
<details><summary><kbd>See more providers (+27)</kbd></summary>
At the same time, we are also planning to support more model service providers, such as Replicate and Perplexity, to further enrich our service provider library. If you would like LobeChat to support your favorite service provider, feel free to join our [community discussion](https://github.com/lobehub/lobe-chat/discussions/1284).
- **[Novita](https://lobechat.com/discover/provider/novita)**: Novita AI is a platform providing a variety of large language models and AI image generation API services, flexible, reliable, and cost-effective. It supports the latest open-source models like Llama3 and Mistral, offering a comprehensive, user-friendly, and auto-scaling API solution for generative AI application development, suitable for the rapid growth of AI startups.
- **[PPIO](https://lobechat.com/discover/provider/ppio)**: PPIO supports stable and cost-efficient open-source LLM APIs, such as DeepSeek, Llama, Qwen etc.
- **[Together AI](https://lobechat.com/discover/provider/togetherai)**: Together AI is dedicated to achieving leading performance through innovative AI models, offering extensive customization capabilities, including rapid scaling support and intuitive deployment processes to meet various enterprise needs.
- **[Fireworks AI](https://lobechat.com/discover/provider/fireworksai)**: Fireworks AI is a leading provider of advanced language model services, focusing on functional calling and multimodal processing. Its latest model, Firefunction V2, is based on Llama-3, optimized for function calling, conversation, and instruction following. The visual language model FireLLaVA-13B supports mixed input of images and text. Other notable models include the Llama series and Mixtral series, providing efficient multilingual instruction following and generation support.
- **[Groq](https://lobechat.com/discover/provider/groq)**: Groq's LPU inference engine has excelled in the latest independent large language model (LLM) benchmarks, redefining the standards for AI solutions with its remarkable speed and efficiency. Groq represents instant inference speed, demonstrating strong performance in cloud-based deployments.
- **[Perplexity](https://lobechat.com/discover/provider/perplexity)**: Perplexity is a leading provider of conversational generation models, offering various advanced Llama 3.1 models that support both online and offline applications, particularly suited for complex natural language processing tasks.
- **[Mistral](https://lobechat.com/discover/provider/mistral)**: Mistral provides advanced general, specialized, and research models widely used in complex reasoning, multilingual tasks, and code generation. Through functional calling interfaces, users can integrate custom functionalities for specific applications.
- **[Ai21Labs](https://lobechat.com/discover/provider/ai21)**: AI21 Labs builds foundational models and AI systems for enterprises, accelerating the application of generative AI in production.
- **[Upstage](https://lobechat.com/discover/provider/upstage)**: Upstage focuses on developing AI models for various business needs, including Solar LLM and document AI, aiming to achieve artificial general intelligence (AGI) for work. It allows for the creation of simple conversational agents through Chat API and supports functional calling, translation, embedding, and domain-specific applications.
- **[xAI](https://lobechat.com/discover/provider/xai)**: xAI is a company dedicated to building artificial intelligence to accelerate human scientific discovery. Our mission is to advance our collective understanding of the universe.
- **[Qwen](https://lobechat.com/discover/provider/qwen)**: Tongyi Qianwen is a large-scale language model independently developed by Alibaba Cloud, featuring strong natural language understanding and generation capabilities. It can answer various questions, create written content, express opinions, and write code, playing a role in multiple fields.
- **[Wenxin](https://lobechat.com/discover/provider/wenxin)**: An enterprise-level one-stop platform for large model and AI-native application development and services, providing the most comprehensive and user-friendly toolchain for the entire process of generative artificial intelligence model development and application development.
- **[Hunyuan](https://lobechat.com/discover/provider/hunyuan)**: A large language model developed by Tencent, equipped with powerful Chinese creative capabilities, logical reasoning abilities in complex contexts, and reliable task execution skills.
- **[ZhiPu](https://lobechat.com/discover/provider/zhipu)**: Zhipu AI offers an open platform for multimodal and language models, supporting a wide range of AI application scenarios, including text processing, image understanding, and programming assistance.
- **[SiliconCloud](https://lobechat.com/discover/provider/siliconcloud)**: SiliconFlow is dedicated to accelerating AGI for the benefit of humanity, enhancing large-scale AI efficiency through an easy-to-use and cost-effective GenAI stack.
- **[01.AI](https://lobechat.com/discover/provider/zeroone)**: 01.AI focuses on AI 2.0 era technologies, vigorously promoting the innovation and application of 'human + artificial intelligence', using powerful models and advanced AI technologies to enhance human productivity and achieve technological empowerment.
- **[Spark](https://lobechat.com/discover/provider/spark)**: iFlytek's Spark model provides powerful AI capabilities across multiple domains and languages, utilizing advanced natural language processing technology to build innovative applications suitable for smart hardware, smart healthcare, smart finance, and other vertical scenarios.
- **[SenseNova](https://lobechat.com/discover/provider/sensenova)**: SenseNova, backed by SenseTime's robust infrastructure, offers efficient and user-friendly full-stack large model services.
- **[Stepfun](https://lobechat.com/discover/provider/stepfun)**: StepFun's large model possesses industry-leading multimodal and complex reasoning capabilities, supporting ultra-long text understanding and powerful autonomous scheduling search engine functions.
- **[Moonshot](https://lobechat.com/discover/provider/moonshot)**: Moonshot is an open-source platform launched by Beijing Dark Side Technology Co., Ltd., providing various natural language processing models with a wide range of applications, including but not limited to content creation, academic research, intelligent recommendations, and medical diagnosis, supporting long text processing and complex generation tasks.
- **[Baichuan](https://lobechat.com/discover/provider/baichuan)**: Baichuan Intelligence is a company focused on the research and development of large AI models, with its models excelling in domestic knowledge encyclopedias, long text processing, and generative creation tasks in Chinese, surpassing mainstream foreign models. Baichuan Intelligence also possesses industry-leading multimodal capabilities, performing excellently in multiple authoritative evaluations. Its models include Baichuan 4, Baichuan 3 Turbo, and Baichuan 3 Turbo 128k, each optimized for different application scenarios, providing cost-effective solutions.
- **[Minimax](https://lobechat.com/discover/provider/minimax)**: MiniMax is a general artificial intelligence technology company established in 2021, dedicated to co-creating intelligence with users. MiniMax has independently developed general large models of different modalities, including trillion-parameter MoE text models, voice models, and image models, and has launched applications such as Conch AI.
- **[InternLM](https://lobechat.com/discover/provider/internlm)**: An open-source organization dedicated to the research and development of large model toolchains. It provides an efficient and user-friendly open-source platform for all AI developers, making cutting-edge large models and algorithm technologies easily accessible.
- **[Higress](https://lobechat.com/discover/provider/higress)**: Higress is a cloud-native API gateway that was developed internally at Alibaba to address the issues of Tengine reload affecting long-lived connections and the insufficient load balancing capabilities for gRPC/Dubbo.
- **[Gitee AI](https://lobechat.com/discover/provider/giteeai)**: Gitee AI's Serverless API provides AI developers with an out of the box large model inference API service.
- **[Taichu](https://lobechat.com/discover/provider/taichu)**: The Institute of Automation, Chinese Academy of Sciences, and Wuhan Artificial Intelligence Research Institute have launched a new generation of multimodal large models, supporting comprehensive question-answering tasks such as multi-turn Q\&A, text creation, image generation, 3D understanding, and signal analysis, with stronger cognitive, understanding, and creative abilities, providing a new interactive experience.
- **[360 AI](https://lobechat.com/discover/provider/ai360)**: 360 AI is an AI model and service platform launched by 360 Company, offering various advanced natural language processing models, including 360GPT2 Pro, 360GPT Pro, 360GPT Turbo, and 360GPT Turbo Responsibility 8K. These models combine large-scale parameters and multimodal capabilities, widely applied in text generation, semantic understanding, dialogue systems, and code generation. With flexible pricing strategies, 360 AI meets diverse user needs, supports developer integration, and promotes the innovation and development of intelligent applications.
</details>
> 📊 Total providers: [<kbd>**37**</kbd>](https://lobechat.com/discover/providers)
<!-- PROVIDER LIST -->
At the same time, we are also planning to support more model service providers. If you would like LobeChat to support your favorite service provider, feel free to join our [💬 community discussion](https://github.com/lobehub/lobe-chat/discussions/1284).
<div align="right"> <div align="right">
@ -165,7 +242,7 @@ At the same time, we are also planning to support more model service providers,
[![][image-feat-local]][docs-feat-local] [![][image-feat-local]][docs-feat-local]
### `3` [Local Large Language Model (LLM) Support][docs-feat-local] ### `6` [Local Large Language Model (LLM) Support][docs-feat-local]
To meet the specific needs of users, LobeChat also supports the use of local models based on [Ollama](https://ollama.ai), allowing users to flexibly use their own or third-party models. To meet the specific needs of users, LobeChat also supports the use of local models based on [Ollama](https://ollama.ai), allowing users to flexibly use their own or third-party models.
@ -181,7 +258,7 @@ To meet the specific needs of users, LobeChat also supports the use of local mod
[![][image-feat-vision]][docs-feat-vision] [![][image-feat-vision]][docs-feat-vision]
### `4` [Model Visual Recognition][docs-feat-vision] ### `7` [Model Visual Recognition][docs-feat-vision]
LobeChat now supports OpenAI's latest [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) model with visual recognition capabilities, LobeChat now supports OpenAI's latest [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) model with visual recognition capabilities,
a multimodal intelligence that can perceive visuals. Users can easily upload or drag and drop images into the dialogue box, a multimodal intelligence that can perceive visuals. Users can easily upload or drag and drop images into the dialogue box,
@ -199,7 +276,7 @@ Whether it's sharing images in daily use or interpreting images within specific
[![][image-feat-tts]][docs-feat-tts] [![][image-feat-tts]][docs-feat-tts]
### `5` [TTS & STT Voice Conversation][docs-feat-tts] ### `8` [TTS & STT Voice Conversation][docs-feat-tts]
LobeChat supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies, enabling our application to convert text messages into clear voice outputs, LobeChat supports Text-to-Speech (TTS) and Speech-to-Text (STT) technologies, enabling our application to convert text messages into clear voice outputs,
allowing users to interact with our conversational agent as if they were talking to a real person. Users can choose from a variety of voices to pair with the agent. allowing users to interact with our conversational agent as if they were talking to a real person. Users can choose from a variety of voices to pair with the agent.
@ -216,7 +293,7 @@ Users can choose the voice that suits their personal preferences or specific sce
[![][image-feat-t2i]][docs-feat-t2i] [![][image-feat-t2i]][docs-feat-t2i]
### `6` [Text to Image Generation][docs-feat-t2i] ### `9` [Text to Image Generation][docs-feat-t2i]
With support for the latest text-to-image generation technology, LobeChat now allows users to invoke image creation tools directly within conversations with the agent. By leveraging the capabilities of AI tools such as [`DALL-E 3`](https://openai.com/dall-e-3), [`MidJourney`](https://www.midjourney.com/), and [`Pollinations`](https://pollinations.ai/), the agents are now equipped to transform your ideas into images. With support for the latest text-to-image generation technology, LobeChat now allows users to invoke image creation tools directly within conversations with the agent. By leveraging the capabilities of AI tools such as [`DALL-E 3`](https://openai.com/dall-e-3), [`MidJourney`](https://www.midjourney.com/), and [`Pollinations`](https://pollinations.ai/), the agents are now equipped to transform your ideas into images.
@ -230,7 +307,7 @@ This enables a more private and immersive creative process, allowing for the sea
[![][image-feat-plugin]][docs-feat-plugin] [![][image-feat-plugin]][docs-feat-plugin]
### `7` [Plugin System (Function Calling)][docs-feat-plugin] ### `10` [Plugin System (Function Calling)][docs-feat-plugin]
The plugin ecosystem of LobeChat is an important extension of its core functionality, greatly enhancing the practicality and flexibility of the LobeChat assistant. The plugin ecosystem of LobeChat is an important extension of its core functionality, greatly enhancing the practicality and flexibility of the LobeChat assistant.
@ -247,13 +324,13 @@ In addition, these plugins are not limited to news aggregation, but can also ext
<!-- PLUGIN LIST --> <!-- PLUGIN LIST -->
| Recent Submits | Description | | Recent Submits | Description |
| ---------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | | ---------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- |
| [Tongyi wanxiang Image Generator](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **YoungTx** on **2024-08-09**</sup> | This plugin uses Alibaba's Tongyi Wanxiang model to generate images based on text prompts.<br/>`image` `tongyi` `wanxiang` | | [Web](https://lobechat.com/discover/plugin/web)<br/><sup>By **Proghit** on **2025-01-24**</sup> | Smart web search that reads and analyzes pages to deliver comprehensive answers from Google results.<br/>`web` `search` |
| [Shopping tools](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **shoppingtools** on **2024-07-19**</sup> | Search for products on eBay & AliExpress, find eBay events & coupons. Get prompt examples.<br/>`shopping` `e-bay` `ali-express` `coupons` | | [MintbaseSearch](https://lobechat.com/discover/plugin/mintbasesearch)<br/><sup>By **mintbase** on **2024-12-31**</sup> | Find any NFT data on the NEAR Protocol.<br/>`crypto` `nft` |
| [Savvy Trader AI](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **savvytrader** on **2024-06-27**</sup> | Realtime stock, crypto and other investment data.<br/>`stock` `analyze` | | [Bing_websearch](https://lobechat.com/discover/plugin/Bingsearch-identifier)<br/><sup>By **FineHow** on **2024-12-22**</sup> | Search for information from the internet based on the Bing API<br/>`bingsearch` |
| [Search1API](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **fatwang2** on **2024-05-06**</sup> | Search aggregation service, specifically designed for LLMs<br/>`web` `search` | | [PortfolioMeta](https://lobechat.com/discover/plugin/StockData)<br/><sup>By **portfoliometa** on **2024-12-22**</sup> | Analyze stocks and get comprehensive real-time investment data and analytics.<br/>`stock` |
> 📊 Total plugins: [<kbd>**50**</kbd>](https://github.com/lobehub/lobe-chat-plugins) > 📊 Total plugins: [<kbd>**46**</kbd>](https://lobechat.com/discover/plugins)
<!-- PLUGIN LIST --> <!-- PLUGIN LIST -->
@ -265,7 +342,7 @@ In addition, these plugins are not limited to news aggregation, but can also ext
[![][image-feat-agent]][docs-feat-agent] [![][image-feat-agent]][docs-feat-agent]
### `8` [Agent Market (GPTs)][docs-feat-agent] ### `11` [Agent Market (GPTs)][docs-feat-agent]
In LobeChat Agent Marketplace, creators can discover a vibrant and innovative community that brings together a multitude of well-designed agents, In LobeChat Agent Marketplace, creators can discover a vibrant and innovative community that brings together a multitude of well-designed agents,
which not only play an important role in work scenarios but also offer great convenience in learning processes. which not only play an important role in work scenarios but also offer great convenience in learning processes.
@ -286,13 +363,13 @@ Our marketplace is not just a showcase platform but also a collaborative space.
<!-- AGENT LIST --> <!-- AGENT LIST -->
| Recent Submits | Description | | Recent Submits | Description |
| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| [Contract Clause Refiner v1.0](https://chat-preview.lobehub.com/market?agent=business-contract)<br/><sup>By **[houhoufm](https://github.com/houhoufm)** on **2024-09-24**</sup> | Output: {Optimize contract clauses for professional and concise expression}<br/>`contract-optimization` `legal-consultation` `copywriting` `terminology` `project-management` | | [审稿回复专家](https://lobechat.com/discover/assistant/academic-paper-overview)<br/><sup>By **[arvinxx](https://github.com/arvinxx)** on **2025-03-11**</sup> | An academic research assistant skilled in high-quality literature retrieval and analysis<br/>`academic-research` `literature-search` `data-analysis` `information-extraction` `consulting` |
| [Meeting Assistant v1.0](https://chat-preview.lobehub.com/market?agent=meeting)<br/><sup>By **[houhoufm](https://github.com/houhoufm)** on **2024-09-24**</sup> | Professional meeting report assistant, distilling meeting key points into report sentences<br/>`meeting-reports` `writing` `communication` `workflow` `professional-skills` | | [Cron Expression Assistant](https://lobechat.com/discover/assistant/crontab-generate)<br/><sup>By **[edgesider](https://github.com/edgesider)** on **2025-02-17**</sup> | Crontab Expression Generator<br/>`crontab` `time-expression` `trigger-time` `generator` `technical-assistance` |
| [Stable Album Cover Prompter](https://chat-preview.lobehub.com/market?agent=title-bpm-stimmung)<br/><sup>By **[MellowTrixX](https://github.com/MellowTrixX)** on **2024-09-24**</sup> | Professional graphic designer for front cover design specializing in creating visual concepts and designs for melodic techno music albums.<br/>`album-cover` `prompt` `stable-diffusion` `cover-design` `cover-prompts` | | [Xiao Zhi French Translation Assistant](https://lobechat.com/discover/assistant/xiao-zhi-french-translation-asst-v-1)<br/><sup>By **[WeR-Best](https://github.com/WeR-Best)** on **2025-02-10**</sup> | A friendly, professional, and empathetic AI assistant for French translation<br/>`ai-assistant` `french-translation` `cross-cultural-communication` `creativity` |
| [Advertising Copywriting Master](https://chat-preview.lobehub.com/market?agent=advertising-copywriting-master)<br/><sup>By **[leter](https://github.com/leter)** on **2024-09-23**</sup> | Specializing in product function analysis and advertising copywriting that resonates with user values<br/>`advertising-copy` `user-values` `marketing-strategy` | | [Investment Assistant](https://lobechat.com/discover/assistant/graham-investmentassi)<br/><sup>By **[farsightlin](https://github.com/farsightlin)** on **2025-02-06**</sup> | Helps users calculate the data needed for valuation<br/>`investment` `valuation` `financial-analysis` `calculator` |
> 📊 Total agents: [<kbd>**392**</kbd> ](https://github.com/lobehub/lobe-chat-agents) > 📊 Total agents: [<kbd>**488**</kbd> ](https://lobechat.com/discover/assistants)
<!-- AGENT LIST --> <!-- AGENT LIST -->
@ -304,7 +381,7 @@ Our marketplace is not just a showcase platform but also a collaborative space.
[![][image-feat-database]][docs-feat-database] [![][image-feat-database]][docs-feat-database]
### `9` [Support Local / Remote Database][docs-feat-database] ### `12` [Support Local / Remote Database][docs-feat-database]
LobeChat supports the use of both server-side and local databases. Depending on your needs, you can choose the appropriate deployment solution: LobeChat supports the use of both server-side and local databases. Depending on your needs, you can choose the appropriate deployment solution:
@ -321,7 +398,7 @@ Regardless of which database you choose, LobeChat can provide you with an excell
[![][image-feat-auth]][docs-feat-auth] [![][image-feat-auth]][docs-feat-auth]
### `10` [Support Multi-User Management][docs-feat-auth] ### `13` [Support Multi-User Management][docs-feat-auth]
LobeChat supports multi-user management and provides two main user authentication and management solutions to meet different needs: LobeChat supports multi-user management and provides two main user authentication and management solutions to meet different needs:
@ -339,7 +416,7 @@ Regardless of which user management solution you choose, LobeChat can provide yo
[![][image-feat-pwa]][docs-feat-pwa] [![][image-feat-pwa]][docs-feat-pwa]
### `11` [Progressive Web App (PWA)][docs-feat-pwa] ### `14` [Progressive Web App (PWA)][docs-feat-pwa]
We deeply understand the importance of providing a seamless experience for users in today's multi-device environment. We deeply understand the importance of providing a seamless experience for users in today's multi-device environment.
Therefore, we have adopted Progressive Web Application ([PWA](https://support.google.com/chrome/answer/9658361)) technology, Therefore, we have adopted Progressive Web Application ([PWA](https://support.google.com/chrome/answer/9658361)) technology,
@ -366,7 +443,7 @@ providing smooth animations, responsive layouts, and adapting to different devic
[![][image-feat-mobile]][docs-feat-mobile] [![][image-feat-mobile]][docs-feat-mobile]
### `12` [Mobile Device Adaptation][docs-feat-mobile] ### `15` [Mobile Device Adaptation][docs-feat-mobile]
We have carried out a series of optimization designs for mobile devices to enhance the user's mobile experience. Currently, we are iterating on the mobile user experience to achieve smoother and more intuitive interactions. If you have any suggestions or ideas, we welcome you to provide feedback through GitHub Issues or Pull Requests. We have carried out a series of optimization designs for mobile devices to enhance the user's mobile experience. Currently, we are iterating on the mobile user experience to achieve smoother and more intuitive interactions. If you have any suggestions or ideas, we welcome you to provide feedback through GitHub Issues or Pull Requests.
@ -378,7 +455,7 @@ We have carried out a series of optimization designs for mobile devices to enhan
[![][image-feat-theme]][docs-feat-theme] [![][image-feat-theme]][docs-feat-theme]
### `13` [Custom Themes][docs-feat-theme] ### `16` [Custom Themes][docs-feat-theme]
As a design-engineering-oriented application, LobeChat places great emphasis on users' personalized experiences, As a design-engineering-oriented application, LobeChat places great emphasis on users' personalized experiences,
hence introducing flexible and diverse theme modes, including a light mode for daytime and a dark mode for nighttime. hence introducing flexible and diverse theme modes, including a light mode for daytime and a dark mode for nighttime.
@ -439,15 +516,15 @@ Beside these features, LobeChat also have much better basic technique undergroun
## 🛳 Self Hosting ## 🛳 Self Hosting
LobeChat provides a Self-Hosted Version with Vercel and [Docker Image][docker-release-link]. This allows you to deploy your own chatbot within a few minutes without any prior knowledge. LobeChat provides a Self-Hosted Version with Vercel, Alibaba Cloud, and [Docker Image][docker-release-link]. This allows you to deploy your own chatbot within a few minutes without any prior knowledge.
> \[!TIP] > \[!TIP]
> >
> Learn more about [📘 Build your own LobeChat][docs-self-hosting] by checking it out. > Learn more about [📘 Build your own LobeChat][docs-self-hosting] by checking it out.
### `A` Deploying with Vercel, Zeabur or Sealos ### `A` Deploying with Vercel, Zeabur, Sealos or Alibaba Cloud
If you want to deploy this service yourself on either Vercel or Zeabur, you can follow these steps: If you want to deploy this service yourself on Vercel, Zeabur, Sealos or Alibaba Cloud, you can follow these steps:
- Prepare your [OpenAI API Key](https://platform.openai.com/account/api-keys). - Prepare your [OpenAI API Key](https://platform.openai.com/account/api-keys).
- Click the button below to start deployment: Log in directly with your GitHub account, and remember to fill in the `OPENAI_API_KEY`(required) and `ACCESS_CODE` (recommended) on the environment variable section. - Click the button below to start deployment: Log in directly with your GitHub account, and remember to fill in the `OPENAI_API_KEY`(required) and `ACCESS_CODE` (recommended) on the environment variable section.
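For reference, a minimal sketch of the two variables mentioned above with placeholder values; in a real deployment you enter them in the platform's environment variable panel rather than in a shell, so the `set -x` form below is only for local testing:

```fish
# Placeholder values, replace with your own before deploying
set -x OPENAI_API_KEY sk-xxxxxxxxxxxxxxxx   # required: your OpenAI API key
set -x ACCESS_CODE lobe66                   # recommended: password protecting your instance
```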
@ -456,9 +533,9 @@ If you want to deploy this service yourself on either Vercel or Zeabur, you can
<div align="center"> <div align="center">
| Deploy with Vercel | Deploy with Zeabur | Deploy with Sealos | Deploy with RepoCloud | | Deploy with Vercel | Deploy with Zeabur | Deploy with Sealos | Deploy with RepoCloud | Deploy with Alibaba Cloud |
| :-------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------------: | | :-------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------------: | :-----------------------------------------------------------------------: |
| [![][deploy-button-image]][deploy-link] | [![][deploy-on-zeabur-button-image]][deploy-on-zeabur-link] | [![][deploy-on-sealos-button-image]][deploy-on-sealos-link] | [![][deploy-on-repocloud-button-image]][deploy-on-repocloud-link] | | [![][deploy-button-image]][deploy-link] | [![][deploy-on-zeabur-button-image]][deploy-on-zeabur-link] | [![][deploy-on-sealos-button-image]][deploy-on-sealos-link] | [![][deploy-on-repocloud-button-image]][deploy-on-repocloud-link] | [![][deploy-on-alibaba-cloud-button-image]][deploy-on-alibaba-cloud-link] |
</div> </div>
@ -484,25 +561,22 @@ If you have deployed your own project following the one-click deployment steps i
We provide a Docker image for deploying the LobeChat service on your own private device. Use the following command to start the LobeChat service: We provide a Docker image for deploying the LobeChat service on your own private device. Use the following command to start the LobeChat service:
1. Create a folder to store the files
```fish ```fish
$ docker run -d -p 3210:3210 \ $ mkdir lobe-chat-db && cd lobe-chat-db
-e OPENAI_API_KEY=sk-xxxx \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
``` ```
> \[!TIP] 2. Initialize the LobeChat infrastructure
>
> If you need to use the OpenAI service through a proxy, you can configure the proxy address using the `OPENAI_PROXY_URL` environment variable: ```fish
bash <(curl -fsSL https://lobe.li/setup.sh)
```
3. Start the LobeChat service
```fish ```fish
$ docker run -d -p 3210:3210 \ docker compose up -d
-e OPENAI_API_KEY=sk-xxxx \
-e OPENAI_PROXY_URL=https://api-proxy.com/v1 \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
``` ```
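A minimal sketch of how you might verify the stack afterwards, assuming the setup step left a Docker Compose project in the current folder:

```fish
# List the containers started by the Compose project in this folder
docker compose ps

# Follow the LobeChat logs while the service finishes starting up
docker compose logs -f
```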
> \[!NOTE] > \[!NOTE]
@ -595,7 +669,7 @@ If you would like to learn more details, please feel free to look at our [📘 D
## 🤝 Contributing ## 🤝 Contributing
Contributions of all types are more than welcome; if you are interested in contributing code, feel free to check out our GitHub [Issues][github-issues-link] and [Projects][github-project-link] to get stuck in to show us what youre made of. Contributions of all types are more than welcome; if you are interested in contributing code, feel free to check out our GitHub [Issues][github-issues-link] and [Projects][github-project-link] to get stuck in to show us what you're made of.
> \[!TIP] > \[!TIP]
> >
@ -692,6 +766,7 @@ This project is [Apache 2.0](./LICENSE) licensed.
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square [back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square
[blog]: https://lobehub.com/blog [blog]: https://lobehub.com/blog
[changelog]: https://lobehub.com/changelog
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg [chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html [chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg [chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
@ -705,27 +780,32 @@ This project is [Apache 2.0](./LICENSE) licensed.
[codespaces-shield]: https://github.com/codespaces/badge.svg [codespaces-shield]: https://github.com/codespaces/badge.svg
[deploy-button-image]: https://vercel.com/button [deploy-button-image]: https://vercel.com/button
[deploy-link]: https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat&env=OPENAI_API_KEY,ACCESS_CODE&envDescription=Find%20your%20OpenAI%20API%20Key%20by%20click%20the%20right%20Learn%20More%20button.%20%7C%20Access%20Code%20can%20protect%20your%20website&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=lobe-chat&repository-name=lobe-chat [deploy-link]: https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat&env=OPENAI_API_KEY,ACCESS_CODE&envDescription=Find%20your%20OpenAI%20API%20Key%20by%20click%20the%20right%20Learn%20More%20button.%20%7C%20Access%20Code%20can%20protect%20your%20website&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=lobe-chat&repository-name=lobe-chat
[deploy-on-alibaba-cloud-button-image]: https://service-info-public.oss-cn-hangzhou.aliyuncs.com/computenest-en.svg
[deploy-on-alibaba-cloud-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeChat%E7%A4%BE%E5%8C%BA%E7%89%88
[deploy-on-repocloud-button-image]: https://d16t0pc4846x52.cloudfront.net/deploylobe.svg [deploy-on-repocloud-button-image]: https://d16t0pc4846x52.cloudfront.net/deploylobe.svg
[deploy-on-repocloud-link]: https://repocloud.io/details/?app_id=248 [deploy-on-repocloud-link]: https://repocloud.io/details/?app_id=248
[deploy-on-sealos-button-image]: https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg [deploy-on-sealos-button-image]: https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg
[deploy-on-sealos-link]: https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dlobe-chat [deploy-on-sealos-link]: https://template.usw.sealos.io/deploy?templateName=lobe-chat-db
[deploy-on-zeabur-button-image]: https://zeabur.com/button.svg [deploy-on-zeabur-button-image]: https://zeabur.com/button.svg
[deploy-on-zeabur-link]: https://zeabur.com/templates/VZGGTI [deploy-on-zeabur-link]: https://zeabur.com/templates/VZGGTI
[discord-link]: https://discord.gg/AYFPHvv2jT [discord-link]: https://discord.gg/AYFPHvv2jT
[discord-shield]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=flat-square [discord-shield]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=flat-square
[discord-shield-badge]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=for-the-badge [discord-shield-badge]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=for-the-badge
[docker-pulls-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-pulls-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-pulls-shield]: https://img.shields.io/docker/pulls/lobehub/lobe-chat?color=45cc11&labelColor=black&style=flat-square [docker-pulls-shield]: https://img.shields.io/docker/pulls/lobehub/lobe-chat?color=45cc11&labelColor=black&style=flat-square&sort=semver
[docker-release-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-release-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-release-shield]: https://img.shields.io/docker/v/lobehub/lobe-chat?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square [docker-release-shield]: https://img.shields.io/docker/v/lobehub/lobe-chat-database?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square&sort=semver
[docker-size-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-size-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-size-shield]: https://img.shields.io/docker/image-size/lobehub/lobe-chat?color=369eff&labelColor=black&style=flat-square [docker-size-shield]: https://img.shields.io/docker/image-size/lobehub/lobe-chat-database?color=369eff&labelColor=black&style=flat-square&sort=semver
[docs]: https://lobehub.com/docs/usage/start [docs]: https://lobehub.com/docs/usage/start
[docs-dev-guide]: https://github.com/lobehub/lobe-chat/wiki/index [docs-dev-guide]: https://github.com/lobehub/lobe-chat/wiki/index
[docs-docker]: https://lobehub.com/docs/self-hosting/platform/docker [docs-docker]: https://lobehub.com/docs/self-hosting/server-database/docker-compose
[docs-env-var]: https://lobehub.com/docs/self-hosting/environment-variables [docs-env-var]: https://lobehub.com/docs/self-hosting/environment-variables
[docs-feat-agent]: https://lobehub.com/docs/usage/features/agent-market [docs-feat-agent]: https://lobehub.com/docs/usage/features/agent-market
[docs-feat-artifacts]: https://lobehub.com/docs/usage/features/artifacts
[docs-feat-auth]: https://lobehub.com/docs/usage/features/auth [docs-feat-auth]: https://lobehub.com/docs/usage/features/auth
[docs-feat-branch]: https://lobehub.com/docs/usage/features/branching-conversations
[docs-feat-cot]: https://lobehub.com/docs/usage/features/cot
[docs-feat-database]: https://lobehub.com/docs/usage/features/database [docs-feat-database]: https://lobehub.com/docs/usage/features/database
[docs-feat-knowledgebase]: https://lobehub.com/blog/knowledge-base [docs-feat-knowledgebase]: https://lobehub.com/blog/knowledge-base
[docs-feat-local]: https://lobehub.com/docs/usage/features/local-llm [docs-feat-local]: https://lobehub.com/docs/usage/features/local-llm
@ -767,22 +847,25 @@ This project is [Apache 2.0](./LICENSE) licensed.
[github-stars-shield]: https://img.shields.io/github/stars/lobehub/lobe-chat?color=ffcb47&labelColor=black&style=flat-square [github-stars-shield]: https://img.shields.io/github/stars/lobehub/lobe-chat?color=ffcb47&labelColor=black&style=flat-square
[github-trending-shield]: https://trendshift.io/api/badge/repositories/2256 [github-trending-shield]: https://trendshift.io/api/badge/repositories/2256
[github-trending-url]: https://trendshift.io/repositories/2256 [github-trending-url]: https://trendshift.io/repositories/2256
[image-banner]: https://github.com/lobehub/lobe-chat/assets/28616219/9f155dff-4737-429f-9cad-a70a1a860c5f [image-banner]: https://github.com/user-attachments/assets/6f293c7f-47b4-47eb-9202-fe68a942d35b
[image-feat-agent]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670869-f1ffbf66-42b6-42cf-a937-9ce1f8328514.png [image-feat-agent]: https://github.com/user-attachments/assets/b3ab6e35-4fbc-468d-af10-e3e0c687350f
[image-feat-auth]: https://github.com/lobehub/lobe-chat/assets/17870709/8ce70e15-40df-451e-b700-66090fe5b8c2 [image-feat-artifacts]: https://github.com/user-attachments/assets/7f95fad6-b210-4e6e-84a0-7f39e96f3a00
[image-feat-database]: https://github.com/lobehub/lobe-chat/assets/17870709/c27a0234-a4e9-40e5-8bcb-42d5ce7e40f9 [image-feat-auth]: https://github.com/user-attachments/assets/80bb232e-19d1-4f97-98d6-e291f3585e6d
[image-feat-knowledgebase]: https://github.com/user-attachments/assets/77e58e1c-c82f-4341-b159-f4eeede9967f [image-feat-branch]: https://github.com/user-attachments/assets/92f72082-02bd-4835-9c54-b089aad7fd41
[image-feat-local]: https://github.com/lobehub/lobe-chat/assets/28616219/ca9a21bc-ea6c-4c90-bf4a-fa53b4fb2b5c [image-feat-cot]: https://github.com/user-attachments/assets/f74f1139-d115-4e9c-8c43-040a53797a5e
[image-feat-mobile]: https://gw.alipayobjects.com/zos/kitchen/R441AuFS4W/mobile.webp [image-feat-database]: https://github.com/user-attachments/assets/f1697c8b-d1fb-4dac-ba05-153c6295d91d
[image-feat-plugin]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670883-33c43a5c-a512-467e-855c-fa299548cce5.png [image-feat-knowledgebase]: https://github.com/user-attachments/assets/7da7a3b2-92fd-4630-9f4e-8560c74955ae
[image-feat-privoder]: https://github.com/lobehub/lobe-chat/assets/28616219/b164bc54-8ba2-4c1e-b2f2-f4d7f7e7a551 [image-feat-local]: https://github.com/user-attachments/assets/1239da50-d832-4632-a7ef-bd754c0f3850
[image-feat-pwa]: https://gw.alipayobjects.com/zos/kitchen/69x6bllkX3/pwa.webp [image-feat-mobile]: https://github.com/user-attachments/assets/32cf43c4-96bd-4a4c-bfb6-59acde6fe380
[image-feat-t2i]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/297746445-0ff762b9-aa08-4337-afb7-12f932b6efbb.png [image-feat-plugin]: https://github.com/user-attachments/assets/66a891ac-01b6-4e3f-b978-2eb07b489b1b
[image-feat-theme]: https://gw.alipayobjects.com/zos/kitchen/pvus1lo%26Z7/darkmode.webp [image-feat-privoder]: https://github.com/user-attachments/assets/e553e407-42de-4919-977d-7dbfcf44a821
[image-feat-tts]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072124-c9853d8d-f1b5-44a8-a305-45ebc0f6d19a.png [image-feat-pwa]: https://github.com/user-attachments/assets/9647f70f-b71b-43b6-9564-7cdd12d1c24d
[image-feat-vision]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072129-382bdf30-e3d6-4411-b5a0-249710b8ba08.png [image-feat-t2i]: https://github.com/user-attachments/assets/708274a7-2458-494b-a6ec-b73dfa1fa7c2
[image-overview]: https://github.com/lobehub/lobe-chat/assets/17870709/56b95d48-f573-41cd-8b38-387bf88bc4bf [image-feat-theme]: https://github.com/user-attachments/assets/b47c39f1-806f-492b-8fcb-b0fa973937c1
[image-star]: https://github.com/lobehub/lobe-chat/assets/17870709/cb06b748-513f-47c2-8740-d876858d7855 [image-feat-tts]: https://github.com/user-attachments/assets/50189597-2cc3-4002-b4c8-756a52ad5c0a
[image-feat-vision]: https://github.com/user-attachments/assets/18574a1f-46c2-4cbc-af2c-35a86e128a07
[image-overview]: https://github.com/user-attachments/assets/dbfaa84a-2c82-4dd9-815c-5be616f264a4
[image-star]: https://github.com/user-attachments/assets/c3b482e7-cef5-4e94-bef9-226900ecfaab
[issues-link]: https://img.shields.io/github/issues/lobehub/lobe-chat.svg?style=flat [issues-link]: https://img.shields.io/github/issues/lobehub/lobe-chat.svg?style=flat
[lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins [lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins
[lobe-commit]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-commit [lobe-commit]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-commit
@ -807,7 +890,7 @@ This project is [Apache 2.0](./LICENSE) licensed.
[profile-link]: https://github.com/lobehub [profile-link]: https://github.com/lobehub
[share-linkedin-link]: https://linkedin.com/feed [share-linkedin-link]: https://linkedin.com/feed
[share-linkedin-shield]: https://img.shields.io/badge/-share%20on%20linkedin-black?labelColor=black&logo=linkedin&logoColor=white&style=flat-square [share-linkedin-shield]: https://img.shields.io/badge/-share%20on%20linkedin-black?labelColor=black&logo=linkedin&logoColor=white&style=flat-square
[share-mastodon-link]: https://mastodon.social/share?text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source,%20extensible%20(Function%20Calling),%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT/LLM%20web%20application.%20https://github.com/lobehub/lobe-chat%20#chatbot%20#chatGPT%20#openAI [share-mastodon-link]: https://mastodon.social/share?text=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source,%20extensible%20%28Function%20Calling%29,%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20https://github.com/lobehub/lobe-chat%20#chatbot%20#chatGPT%20#openAI
[share-mastodon-shield]: https://img.shields.io/badge/-share%20on%20mastodon-black?labelColor=black&logo=mastodon&logoColor=white&style=flat-square [share-mastodon-shield]: https://img.shields.io/badge/-share%20on%20mastodon-black?labelColor=black&logo=mastodon&logoColor=white&style=flat-square
[share-reddit-link]: https://www.reddit.com/submit?title=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20%23chatbot%20%23chatGPT%20%23openAI&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat [share-reddit-link]: https://www.reddit.com/submit?title=Check%20this%20GitHub%20repository%20out%20%F0%9F%A4%AF%20LobeChat%20-%20An%20open-source%2C%20extensible%20%28Function%20Calling%29%2C%20high-performance%20chatbot%20framework.%20It%20supports%20one-click%20free%20deployment%20of%20your%20private%20ChatGPT%2FLLM%20web%20application.%20%23chatbot%20%23chatGPT%20%23openAI&url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat
[share-reddit-shield]: https://img.shields.io/badge/-share%20on%20reddit-black?labelColor=black&logo=reddit&logoColor=white&style=flat-square [share-reddit-shield]: https://img.shields.io/badge/-share%20on%20reddit-black?labelColor=black&logo=reddit&logoColor=white&style=flat-square

@ -8,7 +8,7 @@
支持语音合成、多模态、可扩展的([function call][docs-functionc-call])插件系统<br/> 支持语音合成、多模态、可扩展的([function call][docs-functionc-call])插件系统<br/>
一键**免费**拥有你自己的 ChatGPT/Gemini/Claude/Ollama 应用 一键**免费**拥有你自己的 ChatGPT/Gemini/Claude/Ollama 应用
[English](./README.md) · **简体中文** · [日本語](./README.ja-JP.md) · [官网][official-site] · [更新日志](./CHANGELOG.md) · [文档][docs] · [博客][blog] · [反馈问题][github-issues-link] [English](./README.md) · **简体中文** · [官网][official-site] · [更新日志][changelog] · [文档][docs] · [博客][blog] · [反馈问题][github-issues-link]
<!-- SHIELD GROUP --> <!-- SHIELD GROUP -->
@ -41,7 +41,7 @@
[![][github-trending-shield]][github-trending-url] [![][github-trending-shield]][github-trending-url]
[![][github-hello-shield]][github-hello-url] [![][github-hello-shield]][github-hello-url]
[![][image-overview]][vercel-link] ![][image-overview]
</div> </div>
@ -52,23 +52,26 @@
- [👋🏻 开始使用 & 交流](#-开始使用--交流) - [👋🏻 开始使用 & 交流](#-开始使用--交流)
- [✨ 特性一览](#-特性一览) - [✨ 特性一览](#-特性一览)
- [`1` 文件上传 / 知识库](#1-文件上传--知识库) - [`1` 思维链 (CoT)](#1-思维链-cot)
- [`2` 多模型服务商支持](#2-多模型服务商支持) - [`2` 分支对话](#2-分支对话)
- [`3` 支持本地大语言模型 (LLM)](#3-支持本地大语言模型-llm) - [`3` 支持白板 (Artifacts)](#3-支持白板-artifacts)
- [`4` 模型视觉识别 (Model Visual)](#4-模型视觉识别-model-visual) - [`4` 文件上传 / 知识库](#4-文件上传--知识库)
- [`5` TTS & STT 语音会话](#5-tts--stt-语音会话) - [`5` 多模型服务商支持](#5-多模型服务商支持)
- [`6` Text to Image 文生图](#6-text-to-image-文生图) - [`6` 支持本地大语言模型 (LLM)](#6-支持本地大语言模型-llm)
- [`7` 插件系统 (Tools Calling)](#7-插件系统-tools-calling) - [`7` 模型视觉识别 (Model Visual)](#7-模型视觉识别-model-visual)
- [`8` 助手市场 (GPTs)](#8-助手市场-gpts) - [`8` TTS & STT 语音会话](#8-tts--stt-语音会话)
- [`9` 支持本地 / 远程数据库](#9-支持本地--远程数据库) - [`9` Text to Image 文生图](#9-text-to-image-文生图)
- [`10` 支持多用户管理](#10-支持多用户管理) - [`10` 插件系统 (Tools Calling)](#10-插件系统-tools-calling)
- [`11` 渐进式 Web 应用 (PWA)](#11-渐进式-web-应用-pwa) - [`11` 助手市场 (GPTs)](#11-助手市场-gpts)
- [`12` 移动设备适配](#12-移动设备适配) - [`12` 支持本地 / 远程数据库](#12-支持本地--远程数据库)
- [`13` 自定义主题](#13-自定义主题) - [`13` 支持多用户管理](#13-支持多用户管理)
- [更多特性](#更多特性) - [`14` 渐进式 Web 应用 (PWA)](#14-渐进式-web-应用-pwa)
- [`15` 移动设备适配](#15-移动设备适配)
- [`16` 自定义主题](#16-自定义主题)
- [`*` 更多特性](#-更多特性)
- [⚡️ 性能测试](#-性能测试) - [⚡️ 性能测试](#-性能测试)
- [🛳 开箱即用](#-开箱即用) - [🛳 开箱即用](#-开箱即用)
- [`A` 使用 Vercel、Zeabur 或 Sealos 部署](#a-使用-vercelzeabur-或-sealos-部署) - [`A` 使用 Vercel、Zeabur 、Sealos 或 阿里云计算巢 部署](#a-使用-vercelzeabur-sealos-或-阿里云计算巢-部署)
- [`B` 使用 Docker 部署](#b-使用-docker-部署) - [`B` 使用 Docker 部署](#b-使用-docker-部署)
- [环境变量](#环境变量) - [环境变量](#环境变量)
- [获取 OpenAI API Key](#获取-openai-api-key) - [获取 OpenAI API Key](#获取-openai-api-key)
@ -111,9 +114,48 @@
## ✨ 特性一览 ## ✨ 特性一览
[![][image-feat-cot]][docs-feat-cot]
### `1` [思维链 (CoT)][docs-feat-cot]
体验前所未有的 AI 推理过程。通过创新的思维链CoT可视化功能您可以实时观察复杂问题是如何一步步被解析的。这项突破性的功能为 AI 的决策过程提供了前所未有的透明度,让您能够清晰地了解结论是如何得出的。
通过将复杂的推理过程分解为清晰的逻辑步骤,您可以更好地理解和验证 AI 的解题思路。无论您是在调试问题、学习知识,还是单纯对 AI 推理感兴趣,思维链可视化都能将抽象思维转化为一种引人入胜的互动体验。
[![][back-to-top]](#readme-top)
[![][image-feat-branch]][docs-feat-branch]
### `2` [分支对话][docs-feat-branch]
为您带来更自然、更灵活的 AI 对话方式。通过分支对话功能,您的讨论可以像人类对话一样自然延伸。在任意消息处创建新的对话分支,让您在保留原有上下文的同时,自由探索不同的对话方向。
两种强大模式任您选择:
- **延续模式**:无缝延展当前讨论,保持宝贵的对话上下文
- **独立模式**:基于任意历史消息,开启全新话题探讨
这项突破性功能将线性对话转变为动态的树状结构,让您能够更深入地探索想法,实现更高效的互动体验。
[![][back-to-top]](#readme-top)
[![][image-feat-artifacts]][docs-feat-artifacts]
### `3` [支持白板 (Artifacts)][docs-feat-artifacts]
体验集成于 LobeChat 的 Claude Artifacts 能力。这项革命性功能突破了 AI 人机交互的边界,让您能够实时创建和可视化各种格式的内容。
以前所未有的灵活度进行创作与可视化:
- 生成并展示动态 SVG 图形
- 实时构建与渲染交互式 HTML 页面
- 输出多种格式的专业文档
[![][back-to-top]](#readme-top)
[![][image-feat-knowledgebase]][docs-feat-knowledgebase] [![][image-feat-knowledgebase]][docs-feat-knowledgebase]
### `1` [文件上传 / 知识库][docs-feat-knowledgebase] ### `4` [文件上传 / 知识库][docs-feat-knowledgebase]
LobeChat 支持文件上传与知识库功能,你可以上传文件、图片、音频、视频等多种类型的文件,以及创建知识库,方便用户管理和查找文件。同时在对话中使用文件和知识库功能,实现更加丰富的对话体验。 LobeChat 支持文件上传与知识库功能,你可以上传文件、图片、音频、视频等多种类型的文件,以及创建知识库,方便用户管理和查找文件。同时在对话中使用文件和知识库功能,实现更加丰富的对话体验。
@ -131,7 +173,7 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
[![][image-feat-privoder]][docs-feat-provider] [![][image-feat-privoder]][docs-feat-provider]
### `2` [多模型服务商支持][docs-feat-provider] ### `5` [多模型服务商支持][docs-feat-provider]
在 LobeChat 的不断发展过程中,我们深刻理解到在提供 AI 会话服务时模型服务商的多样性对于满足社区需求的重要性。因此,我们不再局限于单一的模型服务商,而是拓展了对多种模型服务商的支持,以便为用户提供更为丰富和多样化的会话选择。 在 LobeChat 的不断发展过程中,我们深刻理解到在提供 AI 会话服务时模型服务商的多样性对于满足社区需求的重要性。因此,我们不再局限于单一的模型服务商,而是拓展了对多种模型服务商的支持,以便为用户提供更为丰富和多样化的会话选择。
@ -141,20 +183,56 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
我们已经实现了对以下模型服务商的支持: 我们已经实现了对以下模型服务商的支持:
- **AWS Bedrock**:集成了 AWS Bedrock 服务,支持了 **Claude / LLama2** 等模型,提供了强大的自然语言处理能力。[了解更多](https://aws.amazon.com/cn/bedrock) <!-- PROVIDER LIST -->
- **Google AI (Gemini Pro、Gemini Vision)**:接入了 Google 的 **Gemini** 系列模型,包括 Gemini 和 Gemini Pro以支持更高级的语言理解和生成。[了解更多](https://deepmind.google/technologies/gemini/)
- **Anthropic (Claude)**:接入了 Anthropic 的 **Claude** 系列模型,包括 Claude 3 和 Claude 2多模态突破超长上下文树立行业新基准。[了解更多](https://www.anthropic.com/claude) - **[OpenAI](https://lobechat.com/discover/provider/openai)**: OpenAI 是全球领先的人工智能研究机构,其开发的模型如 GPT 系列推动了自然语言处理的前沿。OpenAI 致力于通过创新和高效的 AI 解决方案改变多个行业。他们的产品具有显著的性能和经济性,广泛用于研究、商业和创新应用。
- **ChatGLM**:加入了智谱的 **ChatGLM** 系列模型GLM-4/GLM-4-vision/GLM-3-turbo为用户提供了另一种高效的会话模型选择。[了解更多](https://www.zhipuai.cn/) - **[Ollama](https://lobechat.com/discover/provider/ollama)**: Ollama 提供的模型广泛涵盖代码生成、数学运算、多语种处理和对话互动等领域,支持企业级和本地化部署的多样化需求。
- **Moonshot AI (月之暗面)**:集成了 Moonshot 系列模型,这是一家来自中国的创新性 AI 创业公司,旨在提供更深层次的会话理解。[了解更多](https://www.moonshot.cn/) - **[Anthropic](https://lobechat.com/discover/provider/anthropic)**: Anthropic 是一家专注于人工智能研究和开发的公司,提供了一系列先进的语言模型,如 Claude 3.5 Sonnet、Claude 3 Sonnet、Claude 3 Opus 和 Claude 3 Haiku。这些模型在智能、速度和成本之间取得了理想的平衡适用于从企业级工作负载到快速响应的各种应用场景。Claude 3.5 Sonnet 作为其最新模型,在多项评估中表现优异,同时保持了较高的性价比。
- **Together.ai**:集成部署了数百种开源模型和向量模型,无需本地部署即可随时访问这些模型。[了解更多](https://www.together.ai/) - **[Bedrock](https://lobechat.com/discover/provider/bedrock)**: Bedrock 是亚马逊 AWS 提供的一项服务,专注于为企业提供先进的 AI 语言模型和视觉模型。其模型家族包括 Anthropic 的 Claude 系列、Meta 的 Llama 3.1 系列等,涵盖从轻量级到高性能的多种选择,支持文本生成、对话、图像处理等多种任务,适用于不同规模和需求的企业应用。
- **01.AI (零一万物)**:集成了零一万物模型,系列 API 具备较快的推理速度,这不仅缩短了处理时间,同时也保持了出色的模型效果。[了解更多](https://www.lingyiwanwu.com/) - **[Google](https://lobechat.com/discover/provider/google)**: Google 的 Gemini 系列是其最先进、通用的 AI 模型,由 Google DeepMind 打造,专为多模态设计,支持文本、代码、图像、音频和视频的无缝理解与处理。适用于从数据中心到移动设备的多种环境,极大提升了 AI 模型的效率与应用广泛性。
- **Groq**:接入了 Groq 的 AI 模型,高效处理消息序列,生成回应,胜任多轮对话及单次交互任务。[了解更多](https://groq.com/) - **[DeepSeek](https://lobechat.com/discover/provider/deepseek)**: DeepSeek 是一家专注于人工智能技术研究和应用的公司,其最新模型 DeepSeek-V3 多项评测成绩超越 Qwen2.5-72B 和 Llama-3.1-405B 等开源模型,性能对齐领军闭源模型 GPT-4o 与 Claude-3.5-Sonnet。
- **OpenRouter**:其支持包括 **Claude 3****Gemma****Mistral****Llama2**和**Cohere**等模型路由,支持智能路由优化,提升使用效率,开放且灵活。[了解更多](https://openrouter.ai/) - **[HuggingFace](https://lobechat.com/discover/provider/huggingface)**: HuggingFace Inference API 提供了一种快速且免费的方式,让您可以探索成千上万种模型,适用于各种任务。无论您是在为新应用程序进行原型设计,还是在尝试机器学习的功能,这个 API 都能让您即时访问多个领域的高性能模型。
- **Minimax**: 接入了 Minimax 的 AI 模型,包括 MoE 模型 **abab6**,提供了更多的选择空间。[了解更多](https://www.minimaxi.com/) - **[OpenRouter](https://lobechat.com/discover/provider/openrouter)**: OpenRouter 是一个提供多种前沿大模型接口的服务平台,支持 OpenAI、Anthropic、LLaMA 及更多,适合多样化的开发和应用需求。用户可根据自身需求灵活选择最优的模型和价格,助力 AI 体验的提升。
- **DeepSeek**: 接入了 DeepSeek 的 AI 模型,包括最新的 **DeepSeek-V2**,提供兼顾性能与价格的模型。[了解更多](https://www.deepseek.com/) - **[Cloudflare Workers AI](https://lobechat.com/discover/provider/cloudflare)**: 在 Cloudflare 的全球网络上运行由无服务器 GPU 驱动的机器学习模型。
- **Qwen**: 接入了 Qwen 的 AI 模型,包括最新的 **qwen-turbo****qwen-plus** 和 **qwen-max** 等模型。[了解更多](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction) - **[GitHub](https://lobechat.com/discover/provider/github)**: 通过 GitHub 模型,开发人员可以成为 AI 工程师,并使用行业领先的 AI 模型进行构建。
同时,我们也在计划支持更多的模型服务商,如 Replicate 和 Perplexity 等,以进一步丰富我们的服务商库。如果你希望让 LobeChat 支持你喜爱的服务商,欢迎加入我们的[社区讨论](https://github.com/lobehub/lobe-chat/discussions/1284)。 <details><summary><kbd>See more providers (+27)</kbd></summary>
- **[Novita](https://lobechat.com/discover/provider/novita)**: Novita AI 是一个提供多种大语言模型与 AI 图像生成的 API 服务的平台,灵活、可靠且具有成本效益。它支持 Llama3、Mistral 等最新的开源模型,并为生成式 AI 应用开发提供了全面、用户友好且自动扩展的 API 解决方案,适合 AI 初创公司的快速发展。
- **[PPIO](https://lobechat.com/discover/provider/ppio)**: PPIO 派欧云提供稳定、高性价比的开源模型 API 服务,支持 DeepSeek 全系列、Llama、Qwen 等行业领先大模型。
- **[Together AI](https://lobechat.com/discover/provider/togetherai)**: Together AI 致力于通过创新的 AI 模型实现领先的性能,提供广泛的自定义能力,包括快速扩展支持和直观的部署流程,满足企业的各种需求。
- **[Fireworks AI](https://lobechat.com/discover/provider/fireworksai)**: Fireworks AI 是一家领先的高级语言模型服务商,专注于功能调用和多模态处理。其最新模型 Firefunction V2 基于 Llama-3优化用于函数调用、对话及指令跟随。视觉语言模型 FireLLaVA-13B 支持图像和文本混合输入。其他值得关注的模型包括 Llama 系列和 Mixtral 系列,提供高效的多语言指令跟随与生成支持。
- **[Groq](https://lobechat.com/discover/provider/groq)**: Groq 的 LPU 推理引擎在最新的独立大语言模型LLM基准测试中表现卓越以其惊人的速度和效率重新定义了 AI 解决方案的标准。Groq 是一种即时推理速度的代表,在基于云的部署中展现了良好的性能。
- **[Perplexity](https://lobechat.com/discover/provider/perplexity)**: Perplexity 是一家领先的对话生成模型提供商,提供多种先进的 Llama 3.1 模型,支持在线和离线应用,特别适用于复杂的自然语言处理任务。
- **[Mistral](https://lobechat.com/discover/provider/mistral)**: Mistral 提供先进的通用、专业和研究型模型,广泛应用于复杂推理、多语言任务、代码生成等领域,通过功能调用接口,用户可以集成自定义功能,实现特定应用。
- **[Ai21Labs](https://lobechat.com/discover/provider/ai21)**: AI21 Labs 为企业构建基础模型和人工智能系统,加速生成性人工智能在生产中的应用。
- **[Upstage](https://lobechat.com/discover/provider/upstage)**: Upstage 专注于为各种商业需求开发 AI 模型,包括 Solar LLM 和文档 AI旨在实现工作的人造通用智能AGI。通过 Chat API 创建简单的对话代理,并支持功能调用、翻译、嵌入以及特定领域应用。
- **[xAI](https://lobechat.com/discover/provider/xai)**: xAI 是一家致力于构建人工智能以加速人类科学发现的公司。我们的使命是推动我们对宇宙的共同理解。
- **[Qwen](https://lobechat.com/discover/provider/qwen)**: 通义千问是阿里云自主研发的超大规模语言模型,具有强大的自然语言理解和生成能力。它可以回答各种问题、创作文字内容、表达观点看法、撰写代码等,在多个领域发挥作用。
- **[Wenxin](https://lobechat.com/discover/provider/wenxin)**: 企业级一站式大模型与 AI 原生应用开发及服务平台,提供最全面易用的生成式人工智能模型开发、应用开发全流程工具链
- **[Hunyuan](https://lobechat.com/discover/provider/hunyuan)**: 由腾讯研发的大语言模型,具备强大的中文创作能力,复杂语境下的逻辑推理能力,以及可靠的任务执行能力
- **[ZhiPu](https://lobechat.com/discover/provider/zhipu)**: 智谱 AI 提供多模态与语言模型的开放平台,支持广泛的 AI 应用场景,包括文本处理、图像理解与编程辅助等。
- **[SiliconCloud](https://lobechat.com/discover/provider/siliconcloud)**: SiliconCloud基于优秀开源基础模型的高性价比 GenAI 云服务
- **[01.AI](https://lobechat.com/discover/provider/zeroone)**: 零一万物致力于推动以人为本的 AI 2.0 技术革命,旨在通过大语言模型创造巨大的经济和社会价值,并开创新的 AI 生态与商业模式。
- **[Spark](https://lobechat.com/discover/provider/spark)**: 科大讯飞星火大模型提供多领域、多语言的强大 AI 能力,利用先进的自然语言处理技术,构建适用于智能硬件、智慧医疗、智慧金融等多种垂直场景的创新应用。
- **[SenseNova](https://lobechat.com/discover/provider/sensenova)**: 商汤日日新,依托商汤大装置的强大的基础支撑,提供高效易用的全栈大模型服务。
- **[Stepfun](https://lobechat.com/discover/provider/stepfun)**: 阶跃星辰大模型具备行业领先的多模态及复杂推理能力,支持超长文本理解和强大的自主调度搜索引擎功能。
- **[Moonshot](https://lobechat.com/discover/provider/moonshot)**: Moonshot 是由北京月之暗面科技有限公司推出的开源平台,提供多种自然语言处理模型,应用领域广泛,包括但不限于内容创作、学术研究、智能推荐、医疗诊断等,支持长文本处理和复杂生成任务。
- **[Baichuan](https://lobechat.com/discover/provider/baichuan)**: 百川智能是一家专注于人工智能大模型研发的公司,其模型在国内知识百科、长文本处理和生成创作等中文任务上表现卓越,超越了国外主流模型。百川智能还具备行业领先的多模态能力,在多项权威评测中表现优异。其模型包括 Baichuan 4、Baichuan 3 Turbo 和 Baichuan 3 Turbo 128k 等,分别针对不同应用场景进行优化,提供高性价比的解决方案。
- **[Minimax](https://lobechat.com/discover/provider/minimax)**: MiniMax 是 2021 年成立的通用人工智能科技公司致力于与用户共创智能。MiniMax 自主研发了不同模态的通用大模型,其中包括万亿参数的 MoE 文本大模型、语音大模型以及图像大模型。并推出了海螺 AI 等应用。
- **[InternLM](https://lobechat.com/discover/provider/internlm)**: 致力于大模型研究与开发工具链的开源组织。为所有 AI 开发者提供高效、易用的开源平台,让最前沿的大模型与算法技术触手可及
- **[Higress](https://lobechat.com/discover/provider/higress)**: Higress 是一款云原生 API 网关,在阿里内部为解决 Tengine reload 对长连接业务有损,以及 gRPC/Dubbo 负载均衡能力不足而诞生。
- **[Gitee AI](https://lobechat.com/discover/provider/giteeai)**: Gitee AI 的 Serverless API 为 AI 开发者提供开箱即用的大模型推理 API 服务。
- **[Taichu](https://lobechat.com/discover/provider/taichu)**: 中科院自动化研究所和武汉人工智能研究院推出新一代多模态大模型支持多轮问答、文本创作、图像生成、3D 理解、信号分析等全面问答任务,拥有更强的认知、理解、创作能力,带来全新互动体验。
- **[360 AI](https://lobechat.com/discover/provider/ai360)**: 360 AI 是 360 公司推出的 AI 模型和服务平台,提供多种先进的自然语言处理模型,包括 360GPT2 Pro、360GPT Pro、360GPT Turbo 和 360GPT Turbo Responsibility 8K。这些模型结合了大规模参数和多模态能力广泛应用于文本生成、语义理解、对话系统与代码生成等领域。通过灵活的定价策略360 AI 满足多样化用户需求,支持开发者集成,推动智能化应用的革新和发展。
</details>
> 📊 Total providers: [<kbd>**37**</kbd>](https://lobechat.com/discover/providers)
<!-- PROVIDER LIST -->
同时,我们也在计划支持更多的模型服务商,以进一步丰富我们的服务商库。如果你希望让 LobeChat 支持你喜爱的服务商,欢迎加入我们的 [💬 社区讨论](https://github.com/lobehub/lobe-chat/discussions/6157)。
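Provider access is generally switched on through environment variables at deploy time. The sketch below is illustrative only: `OPENAI_API_KEY` appears elsewhere in this diff, while `DEEPSEEK_API_KEY` is an assumed name that follows the same per-provider pattern and should be checked against the environment-variable docs.

```fish
# Hedged sketch: enable individual providers by passing their API keys at container start.
# DEEPSEEK_API_KEY is an assumed variable name following the per-provider pattern; verify before use.
$ docker run -d -p 3210:3210 \
  -e OPENAI_API_KEY=sk-xxxx \
  -e DEEPSEEK_API_KEY=sk-xxxx \
  --name lobe-chat \
  lobehub/lobe-chat
```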
<div align="right"> <div align="right">
@ -164,7 +242,7 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
[![][image-feat-local]][docs-feat-local] [![][image-feat-local]][docs-feat-local]
### `3` [支持本地大语言模型 (LLM)][docs-feat-local] ### `6` [支持本地大语言模型 (LLM)][docs-feat-local]
为了满足特定用户的需求LobeChat 还基于 [Ollama](https://ollama.ai) 支持了本地模型的使用,让用户能够更灵活地使用自己的或第三方的模型。 为了满足特定用户的需求LobeChat 还基于 [Ollama](https://ollama.ai) 支持了本地模型的使用,让用户能够更灵活地使用自己的或第三方的模型。
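As a rough illustration, a Docker deployment can be pointed at a locally running Ollama instance through an environment variable; `OLLAMA_PROXY_URL` and the `host.docker.internal` hostname are assumptions drawn from common self-hosting setups, not something confirmed by this diff.

```fish
# Assumes Ollama is already serving on the host at its default port 11434.
# OLLAMA_PROXY_URL is the assumed variable name for a custom Ollama endpoint; check the docs.
$ docker run -d -p 3210:3210 \
  -e OLLAMA_PROXY_URL=http://host.docker.internal:11434 \
  --name lobe-chat \
  lobehub/lobe-chat
```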
@ -180,7 +258,7 @@ LobeChat 支持文件上传与知识库功能,你可以上传文件、图片
[![][image-feat-vision]][docs-feat-vision] [![][image-feat-vision]][docs-feat-vision]
### `4` [模型视觉识别 (Model Visual)][docs-feat-vision] ### `7` [模型视觉识别 (Model Visual)][docs-feat-vision]
LobeChat 已经支持 OpenAI 最新的 [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) 支持视觉识别的模型,这是一个具备视觉识别能力的多模态应用。 LobeChat 已经支持 OpenAI 最新的 [`gpt-4-vision`](https://platform.openai.com/docs/guides/vision) 支持视觉识别的模型,这是一个具备视觉识别能力的多模态应用。
用户可以轻松上传图片或者拖拽图片到对话框中,助手将能够识别图片内容,并在此基础上进行智能对话,构建更智能、更多元化的聊天场景。 用户可以轻松上传图片或者拖拽图片到对话框中,助手将能够识别图片内容,并在此基础上进行智能对话,构建更智能、更多元化的聊天场景。
@ -195,7 +273,7 @@ LobeChat 已经支持 OpenAI 最新的 [`gpt-4-vision`](https://platform.openai.
[![][image-feat-tts]][docs-feat-tts] [![][image-feat-tts]][docs-feat-tts]
### `5` [TTS & STT 语音会话][docs-feat-tts] ### `8` [TTS & STT 语音会话][docs-feat-tts]
LobeChat 支持文字转语音Text-to-SpeechTTS和语音转文字Speech-to-TextSTT技术这使得我们的应用能够将文本信息转化为清晰的语音输出用户可以像与真人交谈一样与我们的对话助手进行交流。 LobeChat 支持文字转语音Text-to-SpeechTTS和语音转文字Speech-to-TextSTT技术这使得我们的应用能够将文本信息转化为清晰的语音输出用户可以像与真人交谈一样与我们的对话助手进行交流。
用户可以从多种声音中选择,给助手搭配合适的音源。 同时对于那些倾向于听觉学习或者想要在忙碌中获取信息的用户来说TTS 提供了一个极佳的解决方案。 用户可以从多种声音中选择,给助手搭配合适的音源。 同时对于那些倾向于听觉学习或者想要在忙碌中获取信息的用户来说TTS 提供了一个极佳的解决方案。
@ -210,7 +288,7 @@ LobeChat 支持文字转语音Text-to-SpeechTTS和语音转文字Spe
[![][image-feat-t2i]][docs-feat-t2i] [![][image-feat-t2i]][docs-feat-t2i]
### `6` [Text to Image 文生图][docs-feat-t2i] ### `9` [Text to Image 文生图][docs-feat-t2i]
支持最新的文本到图片生成技术LobeChat 现在能够让用户在与助手对话中直接调用文生图工具进行创作。 支持最新的文本到图片生成技术LobeChat 现在能够让用户在与助手对话中直接调用文生图工具进行创作。
通过利用 [`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/) 和 [`Pollinations`](https://pollinations.ai/) 等 AI 工具的能力, 助手们现在可以将你的想法转化为图像。 通过利用 [`DALL-E 3`](https://openai.com/dall-e-3)、[`MidJourney`](https://www.midjourney.com/) 和 [`Pollinations`](https://pollinations.ai/) 等 AI 工具的能力, 助手们现在可以将你的想法转化为图像。
@ -224,7 +302,7 @@ LobeChat 支持文字转语音Text-to-SpeechTTS和语音转文字Spe
[![][image-feat-plugin]][docs-feat-plugin] [![][image-feat-plugin]][docs-feat-plugin]
### `7` [插件系统 (Tools Calling)][docs-feat-plugin] ### `10` [插件系统 (Tools Calling)][docs-feat-plugin]
LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地增强了 ChatGPT 的实用性和灵活性。 LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地增强了 ChatGPT 的实用性和灵活性。
@ -238,14 +316,14 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地
<!-- PLUGIN LIST --> <!-- PLUGIN LIST -->
| 最近新增 | 插件描述 | | 最近新增 | 描述 |
| ---------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------ | | -------------------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------- |
| [通义万象图像生成器](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **YoungTx** on **2024-08-09**</sup> | 此插件使用阿里巴巴的通义万象模型根据文本提示生成图像。<br/>`图像` `通义` `万象` | | [网页](https://lobechat.com/discover/plugin/web)<br/><sup>By **Proghit** on **2025-01-24**</sup> | 智能网页搜索,读取和分析页面,以提供来自 Google 结果的全面答案。<br/>`网页` `搜索` |
| [购物工具](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **shoppingtools** on **2024-07-19**</sup> | 在 eBay 和 AliExpress 上搜索产品,查找 eBay 活动和优惠券。获取快速示例。<br/>`购物` `e-bay` `ali-express` `优惠券` | | [MintbaseSearch](https://lobechat.com/discover/plugin/mintbasesearch)<br/><sup>By **mintbase** on **2024-12-31**</sup> | 在 NEAR 协议上查找任何 NFT 数据。<br/>`加密货币` `nft` |
| [Savvy Trader AI](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **savvytrader** on **2024-06-27**</sup> | 实时股票、加密货币和其他投资数据。<br/>`股票` `分析` | | [必应网页搜索](https://lobechat.com/discover/plugin/Bingsearch-identifier)<br/><sup>By **FineHow** on **2024-12-22**</sup> | 通过 BingApi 搜索互联网上的信息<br/>`bingsearch` |
| [Search1API](https://chat-preview.lobehub.com/settings/agent)<br/><sup>By **fatwang2** on **2024-05-06**</sup> | 搜索聚合服务,专为 LLMs 设计<br/>`web` `search` | | [PortfolioMeta](https://lobechat.com/discover/plugin/StockData)<br/><sup>By **portfoliometa** on **2024-12-22**</sup> | 分析股票并获取全面的实时投资数据和分析。<br/>`股票` |
> 📊 Total plugins: [<kbd>**50**</kbd>](https://github.com/lobehub/lobe-chat-plugins) > 📊 Total plugins: [<kbd>**46**</kbd>](https://lobechat.com/discover/plugins)
<!-- PLUGIN LIST --> <!-- PLUGIN LIST -->
@ -257,7 +335,7 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地
[![][image-feat-agent]][docs-feat-agent] [![][image-feat-agent]][docs-feat-agent]
### `8` [助手市场 (GPTs)][docs-feat-agent] ### `11` [助手市场 (GPTs)][docs-feat-agent]
在 LobeChat 的助手市场中,创作者们可以发现一个充满活力和创新的社区,它汇聚了众多精心设计的助手,这些助手不仅在工作场景中发挥着重要作用,也在学习过程中提供了极大的便利。 在 LobeChat 的助手市场中,创作者们可以发现一个充满活力和创新的社区,它汇聚了众多精心设计的助手,这些助手不仅在工作场景中发挥着重要作用,也在学习过程中提供了极大的便利。
我们的市场不仅是一个展示平台,更是一个协作的空间。在这里,每个人都可以贡献自己的智慧,分享个人开发的助手。 我们的市场不仅是一个展示平台,更是一个协作的空间。在这里,每个人都可以贡献自己的智慧,分享个人开发的助手。
@ -273,14 +351,14 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地
<!-- AGENT LIST --> <!-- AGENT LIST -->
| 最近新增 | 助手说明 | | 最近新增 | 描述 |
| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------- |
| [合同条款精炼师 v1.0](https://chat-preview.lobehub.com/market?agent=business-contract)<br/><sup>By **[houhoufm](https://github.com/houhoufm)** on **2024-09-24**</sup> | 输出: {优化合同条款,专业简洁表达}<br/>`合同优化` `法律咨询` `文案撰写` `专业术语` `项目管理` | | [审稿回复专家](https://lobechat.com/discover/assistant/academic-paper-overview)<br/><sup>By **[arvinxx](https://github.com/arvinxx)** on **2025-03-11**</sup> | 擅长高质量文献检索与分析的学术研究助手<br/>`学术研究` `文献检索` `数据分析` `信息提取` `咨询` |
| [会议助手 v1.0](https://chat-preview.lobehub.com/market?agent=meeting)<br/><sup>By **[houhoufm](https://github.com/houhoufm)** on **2024-09-24**</sup> | 专业会议汇报助手,提炼会议要点成汇报句子<br/>`会议汇报` `撰写` `沟通` `工作流程` `专业技能` | | [Cron 表达式助手](https://lobechat.com/discover/assistant/crontab-generate)<br/><sup>By **[edgesider](https://github.com/edgesider)** on **2025-02-17**</sup> | Crontab 表达式生成<br/>`crontab` `时间表达` `触发时间` `生成器` `技术辅助` |
| [稳定专辑封面提示生成器](https://chat-preview.lobehub.com/market?agent=title-bpm-stimmung)<br/><sup>By **[MellowTrixX](https://github.com/MellowTrixX)** on **2024-09-24**</sup> | 专业的平面设计师,专注于为旋律科技音乐专辑创建视觉概念和设计。<br/>`专辑封面` `提示` `稳定扩散` `封面设计` `封面提示` | | [小智法语翻译助手](https://lobechat.com/discover/assistant/xiao-zhi-french-translation-asst-v-1)<br/><sup>By **[WeR-Best](https://github.com/WeR-Best)** on **2025-02-10**</sup> | 友好、专业、富有同理心的法语翻译 AI 助手<br/>`ai助手` `法语翻译` `跨文化交流` `创造力` |
| [广告文案创作大师](https://chat-preview.lobehub.com/market?agent=advertising-copywriting-master)<br/><sup>By **[leter](https://github.com/leter)** on **2024-09-23**</sup> | 擅长产品功能分析与用户价值观广告文案创作<br/>`广告文案` `用户价值观` `营销策略` | | [投资小助手](https://lobechat.com/discover/assistant/graham-investmentassi)<br/><sup>By **[farsightlin](https://github.com/farsightlin)** on **2025-02-06**</sup> | 帮助用户计算估值所需的一些数据<br/>`投资` `估值` `财务分析` `计算器` |
> 📊 Total agents: [<kbd>**392**</kbd> ](https://github.com/lobehub/lobe-chat-agents) > 📊 Total agents: [<kbd>**488**</kbd> ](https://lobechat.com/discover/assistants)
<!-- AGENT LIST --> <!-- AGENT LIST -->
@ -292,7 +370,7 @@ LobeChat 的插件生态系统是其核心功能的重要扩展,它极大地
[![][image-feat-database]][docs-feat-database] [![][image-feat-database]][docs-feat-database]
### `9` [支持本地 / 远程数据库][docs-feat-database] ### `12` [支持本地 / 远程数据库][docs-feat-database]
LobeChat 支持同时使用服务端数据库和本地数据库。根据您的需求,您可以选择合适的部署方案: LobeChat 支持同时使用服务端数据库和本地数据库。根据您的需求,您可以选择合适的部署方案:
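For the server-side database option, the variables below are a minimal sketch excerpted from the docker-compose example further down in this diff; `NEXT_PUBLIC_SERVICE_MODE` is an assumed switch name, and the placeholder values must be replaced with your own secrets.

```fish
# Server-database sketch: DATABASE_URL and KEY_VAULTS_SECRET mirror the compose example
# later in this diff; NEXT_PUBLIC_SERVICE_MODE=server is an assumption to verify against the docs.
$ docker run -d -p 3210:3210 \
  -e NEXT_PUBLIC_SERVICE_MODE=server \
  -e DATABASE_URL=postgresql://postgres:<password>@localhost:5432/lobechat \
  -e KEY_VAULTS_SECRET=<random-32-byte-secret> \
  --name lobe-chat-database \
  lobehub/lobe-chat-database
```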
@ -309,7 +387,7 @@ LobeChat 支持同时使用服务端数据库和本地数据库。根据您的
[![][image-feat-auth]][docs-feat-auth] [![][image-feat-auth]][docs-feat-auth]
### `10` [支持多用户管理][docs-feat-auth] ### `13` [支持多用户管理][docs-feat-auth]
LobeChat 支持多用户管理,提供了两种主要的用户认证和管理方案,以满足不同需求: LobeChat 支持多用户管理,提供了两种主要的用户认证和管理方案,以满足不同需求:
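As a sketch of the SSO route, the variables below are lifted from the Logto-based docker-compose example later in this diff (next-auth as the authentication layer); the secret, ports, and issuer URL are placeholders to adjust for your own deployment.

```fish
# Wiring LobeChat to a Logto instance via next-auth; values mirror the compose example below.
$ docker run -d -p 3210:3210 \
  -e NEXT_AUTH_SSO_PROVIDERS=logto \
  -e NEXT_AUTH_SECRET=<random-secret> \
  -e NEXTAUTH_URL=http://localhost:3210/api/auth \
  -e LOGTO_ISSUER=http://localhost:3001/oidc \
  --name lobe-chat \
  lobehub/lobe-chat-database
```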
@ -327,7 +405,7 @@ LobeChat 支持多用户管理,提供了两种主要的用户认证和管理
[![][image-feat-pwa]][docs-feat-pwa] [![][image-feat-pwa]][docs-feat-pwa]
### `11` [渐进式 Web 应用 (PWA)][docs-feat-pwa] ### `14` [渐进式 Web 应用 (PWA)][docs-feat-pwa]
我们深知在当今多设备环境下为用户提供无缝体验的重要性。为此,我们采用了渐进式 Web 应用 [PWA](https://support.google.com/chrome/answer/9658361) 技术, 我们深知在当今多设备环境下为用户提供无缝体验的重要性。为此,我们采用了渐进式 Web 应用 [PWA](https://support.google.com/chrome/answer/9658361) 技术,
这是一种能够将网页应用提升至接近原生应用体验的现代 Web 技术。通过 PWALobeChat 能够在桌面和移动设备上提供高度优化的用户体验,同时保持轻量级和高性能的特点。 这是一种能够将网页应用提升至接近原生应用体验的现代 Web 技术。通过 PWALobeChat 能够在桌面和移动设备上提供高度优化的用户体验,同时保持轻量级和高性能的特点。
@ -350,7 +428,7 @@ LobeChat 支持多用户管理,提供了两种主要的用户认证和管理
[![][image-feat-mobile]][docs-feat-mobile] [![][image-feat-mobile]][docs-feat-mobile]
### `12` [移动设备适配][docs-feat-mobile] ### `15` [移动设备适配][docs-feat-mobile]
针对移动设备进行了一系列的优化设计,以提升用户的移动体验。目前,我们正在对移动端的用户体验进行版本迭代,以实现更加流畅和直观的交互。如果您有任何建议或想法,我们非常欢迎您通过 GitHub Issues 或者 Pull Requests 提供反馈。 针对移动设备进行了一系列的优化设计,以提升用户的移动体验。目前,我们正在对移动端的用户体验进行版本迭代,以实现更加流畅和直观的交互。如果您有任何建议或想法,我们非常欢迎您通过 GitHub Issues 或者 Pull Requests 提供反馈。
@ -362,7 +440,7 @@ LobeChat 支持多用户管理,提供了两种主要的用户认证和管理
[![][image-feat-theme]][docs-feat-theme] [![][image-feat-theme]][docs-feat-theme]
### `13` [自定义主题][docs-feat-theme] ### `16` [自定义主题][docs-feat-theme]
作为设计工程师出身LobeChat 在界面设计上充分考虑用户的个性化体验,因此引入了灵活多变的主题模式,其中包括日间的亮色模式和夜间的深色模式。 作为设计工程师出身LobeChat 在界面设计上充分考虑用户的个性化体验,因此引入了灵活多变的主题模式,其中包括日间的亮色模式和夜间的深色模式。
除了主题模式的切换,还提供了一系列的颜色定制选项,允许用户根据自己的喜好来调整应用的主题色彩。无论是想要沉稳的深蓝,还是希望活泼的桃粉,或者是专业的灰白,用户都能够在 LobeChat 中找到匹配自己风格的颜色选择。 除了主题模式的切换,还提供了一系列的颜色定制选项,允许用户根据自己的喜好来调整应用的主题色彩。无论是想要沉稳的深蓝,还是希望活泼的桃粉,或者是专业的灰白,用户都能够在 LobeChat 中找到匹配自己风格的颜色选择。
@ -377,7 +455,7 @@ LobeChat 支持多用户管理,提供了两种主要的用户认证和管理
</div> </div>
### 更多特性 ### `*` 更多特性
除了上述功能特性以外LobeChat 所具有的设计和技术能力将为你带来更多使用保障: 除了上述功能特性以外LobeChat 所具有的设计和技术能力将为你带来更多使用保障:
@ -426,9 +504,9 @@ LobeChat 提供了 Vercel 的 自托管版本 和 [Docker 镜像][docker-release
> >
> 完整教程请查阅 [📘 构建属于自己的 Lobe Chat][docs-self-hosting] > 完整教程请查阅 [📘 构建属于自己的 Lobe Chat][docs-self-hosting]
### `A` 使用 Vercel、Zeabur 或 Sealos 部署 ### `A` 使用 Vercel、Zeabur 、Sealos 或 阿里云计算巢 部署
如果想在 Vercel 或 Zeabur 上部署该服务,可以按照以下步骤进行操作: 如果想在 Vercel、Zeabur 或阿里云上部署该服务,可以按照以下步骤进行操作:
- 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys) 。 - 准备好你的 [OpenAI API Key](https://platform.openai.com/account/api-keys) 。
- 点击下方按钮开始部署: 直接使用 GitHub 账号登录即可,记得在环境变量页填入 `OPENAI_API_KEY` (必填) 和 `ACCESS_CODE`(推荐); - 点击下方按钮开始部署: 直接使用 GitHub 账号登录即可,记得在环境变量页填入 `OPENAI_API_KEY` (必填) 和 `ACCESS_CODE`(推荐);
@ -437,9 +515,9 @@ LobeChat 提供了 Vercel 的 自托管版本 和 [Docker 镜像][docker-release
<div align="center"> <div align="center">
| 使用 Vercel 部署 | 使用 Zeabur 部署 | 使用 Sealos 部署 | | 使用 Vercel 部署 | 使用 Zeabur 部署 | 使用 Sealos 部署 | 使用阿里云计算巢部署 |
| :-------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------: | | :-------------------------------------: | :---------------------------------------------------------: | :---------------------------------------------------------: | :-----------------------------------------------------------------------: |
| [![][deploy-button-image]][deploy-link] | [![][deploy-on-zeabur-button-image]][deploy-on-zeabur-link] | [![][deploy-on-sealos-button-image]][deploy-on-sealos-link] | | [![][deploy-button-image]][deploy-link] | [![][deploy-on-zeabur-button-image]][deploy-on-zeabur-link] | [![][deploy-on-sealos-button-image]][deploy-on-sealos-link] | [![][deploy-on-alibaba-cloud-button-image]][deploy-on-alibaba-cloud-link] |
</div> </div>
@ -463,27 +541,24 @@ LobeChat 提供了 Vercel 的 自托管版本 和 [Docker 镜像][docker-release
[![][docker-size-shield]][docker-size-link] [![][docker-size-shield]][docker-size-link]
[![][docker-pulls-shield]][docker-pulls-link] [![][docker-pulls-shield]][docker-pulls-link]
我们提供了 Docker 镜像,供你在自己的私有设备上部署 LobeChat 服务。使用以下命令即可使用一键启动 LobeChat 服务: We provide a Docker image for deploying the LobeChat service on your own private device. Use the following command to start the LobeChat service:
1. Create a folder to store files
```fish ```fish
$ docker run -d -p 3210:3210 \ $ mkdir lobe-chat-db && cd lobe-chat-db
-e OPENAI_API_KEY=sk-xxxx \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
``` ```
> \[!TIP] 2. 启动一键脚本
>
> 如果你需要通过代理使用 OpenAI 服务,你可以使用 `OPENAI_PROXY_URL` 环境变量来配置代理地址: ```fish
bash <(curl -fsSL https://lobe.li/setup.sh) -l zh_CN
```
3. 启动 LobeChat
```fish ```fish
$ docker run -d -p 3210:3210 \ docker compose up -d
-e OPENAI_API_KEY=sk-xxxx \
-e OPENAI_PROXY_URL=https://api-proxy.com/v1 \
-e ACCESS_CODE=lobe66 \
--name lobe-chat \
lobehub/lobe-chat
``` ```
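Once the stack is up, a quick sanity check is to list the containers and tail the app service's logs; the service name `lobe` follows the compose example shown later in this diff and may differ if the setup script generates a different file.

```fish
# Confirm the containers are running, then open http://localhost:3210 (or your LOBE_PORT).
$ docker compose ps
$ docker compose logs -f lobe
```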
> \[!NOTE] > \[!NOTE]
@ -713,10 +788,11 @@ This project is [Apache 2.0](./LICENSE) licensed.
[back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square [back-to-top]: https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square
[blog]: https://lobehub.com/zh/blog [blog]: https://lobehub.com/zh/blog
[changelog]: https://lobehub.com/changelog
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg [chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html [chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg [chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/chat_preview_lobehub_com_chat.html [chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
[chat-plugin-sdk]: https://github.com/lobehub/chat-plugin-sdk [chat-plugin-sdk]: https://github.com/lobehub/chat-plugin-sdk
[chat-plugin-template]: https://github.com/lobehub/chat-plugin-template [chat-plugin-template]: https://github.com/lobehub/chat-plugin-template
[chat-plugins-gateway]: https://github.com/lobehub/chat-plugins-gateway [chat-plugins-gateway]: https://github.com/lobehub/chat-plugins-gateway
@ -726,25 +802,30 @@ This project is [Apache 2.0](./LICENSE) licensed.
[codespaces-shield]: https://github.com/codespaces/badge.svg [codespaces-shield]: https://github.com/codespaces/badge.svg
[deploy-button-image]: https://vercel.com/button [deploy-button-image]: https://vercel.com/button
[deploy-link]: https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat&env=OPENAI_API_KEY,ACCESS_CODE&envDescription=Find%20your%20OpenAI%20API%20Key%20by%20click%20the%20right%20Learn%20More%20button.%20%7C%20Access%20Code%20can%20protect%20your%20website&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=lobe-chat&repository-name=lobe-chat [deploy-link]: https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Flobehub%2Flobe-chat&env=OPENAI_API_KEY,ACCESS_CODE&envDescription=Find%20your%20OpenAI%20API%20Key%20by%20click%20the%20right%20Learn%20More%20button.%20%7C%20Access%20Code%20can%20protect%20your%20website&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=lobe-chat&repository-name=lobe-chat
[deploy-on-alibaba-cloud-button-image]: https://service-info-public.oss-cn-hangzhou.aliyuncs.com/computenest-en.svg
[deploy-on-alibaba-cloud-link]: https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=LobeChat%E7%A4%BE%E5%8C%BA%E7%89%88
[deploy-on-sealos-button-image]: https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg [deploy-on-sealos-button-image]: https://raw.githubusercontent.com/labring-actions/templates/main/Deploy-on-Sealos.svg
[deploy-on-sealos-link]: https://cloud.sealos.io/?openapp=system-template%3FtemplateName%3Dlobe-chat [deploy-on-sealos-link]: https://template.hzh.sealos.run/deploy?templateName=lobe-chat-db
[deploy-on-zeabur-button-image]: https://zeabur.com/button.svg [deploy-on-zeabur-button-image]: https://zeabur.com/button.svg
[deploy-on-zeabur-link]: https://zeabur.com/templates/VZGGTI [deploy-on-zeabur-link]: https://zeabur.com/templates/VZGGTI
[discord-link]: https://discord.gg/AYFPHvv2jT [discord-link]: https://discord.gg/AYFPHvv2jT
[discord-shield]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=flat-square [discord-shield]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=flat-square
[discord-shield-badge]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=for-the-badge [discord-shield-badge]: https://img.shields.io/discord/1127171173982154893?color=5865F2&label=discord&labelColor=black&logo=discord&logoColor=white&style=for-the-badge
[docker-pulls-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-pulls-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-pulls-shield]: https://img.shields.io/docker/pulls/lobehub/lobe-chat?color=45cc11&labelColor=black&style=flat-square [docker-pulls-shield]: https://img.shields.io/docker/pulls/lobehub/lobe-chat?color=45cc11&labelColor=black&style=flat-square&sort=semver
[docker-release-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-release-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-release-shield]: https://img.shields.io/docker/v/lobehub/lobe-chat?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square [docker-release-shield]: https://img.shields.io/docker/v/lobehub/lobe-chat-database?color=369eff&label=docker&labelColor=black&logo=docker&logoColor=white&style=flat-square&sort=semver
[docker-size-link]: https://hub.docker.com/r/lobehub/lobe-chat [docker-size-link]: https://hub.docker.com/r/lobehub/lobe-chat-database
[docker-size-shield]: https://img.shields.io/docker/image-size/lobehub/lobe-chat?color=369eff&labelColor=black&style=flat-square [docker-size-shield]: https://img.shields.io/docker/image-size/lobehub/lobe-chat-database?color=369eff&labelColor=black&style=flat-square&sort=semver
[docs]: https://lobehub.com/zh/docs/usage/start [docs]: https://lobehub.com/zh/docs/usage/start
[docs-dev-guide]: https://github.com/lobehub/lobe-chat/wiki/index [docs-dev-guide]: https://github.com/lobehub/lobe-chat/wiki/index
[docs-docker]: https://lobehub.com/docs/self-hosting/platform/docker [docs-docker]: https://lobehub.com/zh/docs/self-hosting/server-database/docker-compose
[docs-env-var]: https://lobehub.com/docs/self-hosting/environment-variables [docs-env-var]: https://lobehub.com/docs/self-hosting/environment-variables
[docs-feat-agent]: https://lobehub.com/docs/usage/features/agent-market [docs-feat-agent]: https://lobehub.com/docs/usage/features/agent-market
[docs-feat-artifacts]: https://lobehub.com/docs/usage/features/artifacts
[docs-feat-auth]: https://lobehub.com/docs/usage/features/auth [docs-feat-auth]: https://lobehub.com/docs/usage/features/auth
[docs-feat-branch]: https://lobehub.com/docs/usage/features/branching-conversations
[docs-feat-cot]: https://lobehub.com/docs/usage/features/cot
[docs-feat-database]: https://lobehub.com/docs/usage/features/database [docs-feat-database]: https://lobehub.com/docs/usage/features/database
[docs-feat-knowledgebase]: https://lobehub.com/blog/knowledge-base [docs-feat-knowledgebase]: https://lobehub.com/blog/knowledge-base
[docs-feat-local]: https://lobehub.com/docs/usage/features/local-llm [docs-feat-local]: https://lobehub.com/docs/usage/features/local-llm
@ -788,22 +869,25 @@ This project is [Apache 2.0](./LICENSE) licensed.
[github-stars-shield]: https://img.shields.io/github/stars/lobehub/lobe-chat?color=ffcb47&labelColor=black&style=flat-square [github-stars-shield]: https://img.shields.io/github/stars/lobehub/lobe-chat?color=ffcb47&labelColor=black&style=flat-square
[github-trending-shield]: https://trendshift.io/api/badge/repositories/2256 [github-trending-shield]: https://trendshift.io/api/badge/repositories/2256
[github-trending-url]: https://trendshift.io/repositories/2256 [github-trending-url]: https://trendshift.io/repositories/2256
[image-banner]: https://github.com/lobehub/lobe-chat/assets/28616219/9f155dff-4737-429f-9cad-a70a1a860c5f [image-banner]: https://github.com/user-attachments/assets/6f293c7f-47b4-47eb-9202-fe68a942d35b
[image-feat-agent]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670869-f1ffbf66-42b6-42cf-a937-9ce1f8328514.png [image-feat-agent]: https://github.com/user-attachments/assets/b3ab6e35-4fbc-468d-af10-e3e0c687350f
[image-feat-auth]: https://github.com/lobehub/lobe-chat/assets/17870709/8ce70e15-40df-451e-b700-66090fe5b8c2 [image-feat-artifacts]: https://github.com/user-attachments/assets/7f95fad6-b210-4e6e-84a0-7f39e96f3a00
[image-feat-database]: https://github.com/lobehub/lobe-chat/assets/17870709/c27a0234-a4e9-40e5-8bcb-42d5ce7e40f9 [image-feat-auth]: https://github.com/user-attachments/assets/80bb232e-19d1-4f97-98d6-e291f3585e6d
[image-feat-knowledgebase]: https://github.com/user-attachments/assets/77e58e1c-c82f-4341-b159-f4eeede9967f [image-feat-branch]: https://github.com/user-attachments/assets/92f72082-02bd-4835-9c54-b089aad7fd41
[image-feat-local]: https://github.com/lobehub/lobe-chat/assets/28616219/ca9a21bc-ea6c-4c90-bf4a-fa53b4fb2b5c [image-feat-cot]: https://github.com/user-attachments/assets/f74f1139-d115-4e9c-8c43-040a53797a5e
[image-feat-mobile]: https://gw.alipayobjects.com/zos/kitchen/R441AuFS4W/mobile.webp [image-feat-database]: https://github.com/user-attachments/assets/f1697c8b-d1fb-4dac-ba05-153c6295d91d
[image-feat-plugin]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/268670883-33c43a5c-a512-467e-855c-fa299548cce5.png [image-feat-knowledgebase]: https://github.com/user-attachments/assets/7da7a3b2-92fd-4630-9f4e-8560c74955ae
[image-feat-privoder]: https://github.com/lobehub/lobe-chat/assets/28616219/b164bc54-8ba2-4c1e-b2f2-f4d7f7e7a551 [image-feat-local]: https://github.com/user-attachments/assets/1239da50-d832-4632-a7ef-bd754c0f3850
[image-feat-pwa]: https://gw.alipayobjects.com/zos/kitchen/69x6bllkX3/pwa.webp [image-feat-mobile]: https://github.com/user-attachments/assets/32cf43c4-96bd-4a4c-bfb6-59acde6fe380
[image-feat-t2i]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/297746445-0ff762b9-aa08-4337-afb7-12f932b6efbb.png [image-feat-plugin]: https://github.com/user-attachments/assets/66a891ac-01b6-4e3f-b978-2eb07b489b1b
[image-feat-theme]: https://gw.alipayobjects.com/zos/kitchen/pvus1lo%26Z7/darkmode.webp [image-feat-privoder]: https://github.com/user-attachments/assets/e553e407-42de-4919-977d-7dbfcf44a821
[image-feat-tts]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072124-c9853d8d-f1b5-44a8-a305-45ebc0f6d19a.png [image-feat-pwa]: https://github.com/user-attachments/assets/9647f70f-b71b-43b6-9564-7cdd12d1c24d
[image-feat-vision]: https://github-production-user-asset-6210df.s3.amazonaws.com/17870709/284072129-382bdf30-e3d6-4411-b5a0-249710b8ba08.png [image-feat-t2i]: https://github.com/user-attachments/assets/708274a7-2458-494b-a6ec-b73dfa1fa7c2
[image-overview]: https://github.com/lobehub/lobe-chat/assets/17870709/56b95d48-f573-41cd-8b38-387bf88bc4bf [image-feat-theme]: https://github.com/user-attachments/assets/b47c39f1-806f-492b-8fcb-b0fa973937c1
[image-star]: https://github.com/lobehub/lobe-chat/assets/17870709/cb06b748-513f-47c2-8740-d876858d7855 [image-feat-tts]: https://github.com/user-attachments/assets/50189597-2cc3-4002-b4c8-756a52ad5c0a
[image-feat-vision]: https://github.com/user-attachments/assets/18574a1f-46c2-4cbc-af2c-35a86e128a07
[image-overview]: https://github.com/user-attachments/assets/dbfaa84a-2c82-4dd9-815c-5be616f264a4
[image-star]: https://github.com/user-attachments/assets/c3b482e7-cef5-4e94-bef9-226900ecfaab
[issues-link]: https://img.shields.io/github/issues/lobehub/lobe-chat.svg?style=flat [issues-link]: https://img.shields.io/github/issues/lobehub/lobe-chat.svg?style=flat
[lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins [lobe-chat-plugins]: https://github.com/lobehub/lobe-chat-plugins
[lobe-commit]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-commit [lobe-commit]: https://github.com/lobehub/lobe-commit/tree/master/packages/lobe-commit


@ -2,64 +2,34 @@
#### TOC #### TOC
- [Welcome Page](#welcome-page)
- [Chat Page](#chat-page) - [Chat Page](#chat-page)
- [Market Page](#market-page) - [Discover Page](#discover-page)
- [Settings Page](#settings-page)
## Welcome Page
> **Info**\
> <https://chat-preview.lobehub.com/welcome>
| Desktop | Mobile |
| :---------------------------------------------: | :--------------------------------------------: |
| ![][welcome-desktop] | ![][welcome-mobile] |
| [⚡️ Lighthouse Report][welcome-desktop-report] | [⚡️ Lighthouse Report][welcome-mobile-report] |
## Chat Page ## Chat Page
> **Info**\ > **Info**\
> <https://chat-preview.lobehub.com/chat> > <https://lobechat.com/chat>
| Desktop | Mobile | | Desktop | Mobile |
| :------------------------------------------: | :-----------------------------------------: | | :------------------------------------------: | :-----------------------------------------: |
| ![][chat-desktop] | ![][chat-mobile] | | ![][chat-desktop] | ![][chat-mobile] |
| [⚡️ Lighthouse Report][chat-desktop-report] | [⚡️ Lighthouse Report][chat-mobile-report] | | [⚡️ Lighthouse Report][chat-desktop-report] | [⚡️ Lighthouse Report][chat-mobile-report] |
## Market Page ## Discover Page
> **Info**\
> <https://chat-preview.lobehub.com/market>
| Desktop | Mobile |
| :--------------------------------------------: | :-------------------------------------------: |
| ![][market-desktop] | ![][market-mobile] |
| [⚡️ Lighthouse Report][market-desktop-report] | [⚡️ Lighthouse Report][market-mobile-report] |
## Settings Page
> **Info**\ > **Info**\
> <https://chat-preview.lobehub.com/settings> > <https://lobechat.com/discover>
| Desktop | Mobile | | Desktop | Mobile |
| :----------------------------------------------: | :---------------------------------------------: | | :----------------------------------------------: | :---------------------------------------------: |
| ![][settings-desktop] | ![][settings-mobile] | | ![][discover-desktop] | ![][discover-mobile] |
| [⚡️ Lighthouse Report][settings-desktop-report] | [⚡️ Lighthouse Report][settings-mobile-report] | | [⚡️ Lighthouse Report][discover-desktop-report] | [⚡️ Lighthouse Report][discover-mobile-report] |
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg [chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html [chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg [chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/chat_preview_lobehub_com_chat.html [chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
[market-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/market/desktop/pagespeed.svg [discover-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/desktop/pagespeed.svg
[market-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/market/desktop/chat_preview_lobehub_com_market.html [discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
[market-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/market/mobile/pagespeed.svg [discover-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/mobile/pagespeed.svg
[market-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/market/mobile/chat_preview_lobehub_com_market.html [discover-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/mobile/lobechat_com_discover.html
[settings-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/settings/desktop/pagespeed.svg
[settings-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/settings/desktop/chat_preview_lobehub_com_settings.html
[settings-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/settings/mobile/pagespeed.svg
[settings-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/settings/mobile/chat_preview_lobehub_com_settings.html
[welcome-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/welcome/desktop/pagespeed.svg
[welcome-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/welcome/desktop/chat_preview_lobehub_com_welcome.html
[welcome-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/welcome/mobile/pagespeed.svg
[welcome-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/welcome/mobile/chat_preview_lobehub_com_welcome.html

@ -2,64 +2,34 @@
#### TOC #### TOC
- [Welcome 欢迎页面](#welcome-欢迎页面)
- [Chat 聊天页面](#chat-聊天页面) - [Chat 聊天页面](#chat-聊天页面)
- [Market 市场页面](#market-市场页面) - [Discover 发现页面](#discover-发现页面)
- [Settings 设置页面](#settings-设置页面)
## Welcome 欢迎页面
> **Info**\
> <https://chat-preview.lobehub.com/welcome>
| Desktop | Mobile |
| :---------------------------------------------: | :--------------------------------------------: |
| ![][welcome-desktop] | ![][welcome-mobile] |
| [⚡️ Lighthouse Report][welcome-desktop-report] | [⚡️ Lighthouse Report][welcome-mobile-report] |
## Chat 聊天页面 ## Chat 聊天页面
> **Info**\ > **Info**\
> <https://chat-preview.lobehub.com/chat> > <https://lobechat.com/chat>
| Desktop | Mobile | | Desktop | Mobile |
| :------------------------------------------: | :-----------------------------------------: | | :------------------------------------------: | :-----------------------------------------: |
| ![][chat-desktop] | ![][chat-mobile] | | ![][chat-desktop] | ![][chat-mobile] |
| [⚡️ Lighthouse Report][chat-desktop-report] | [⚡️ Lighthouse Report][chat-mobile-report] | | [⚡️ Lighthouse Report][chat-desktop-report] | [⚡️ Lighthouse Report][chat-mobile-report] |
## Market 市场页面 ## Discover 发现页面
> **Info**\
> <https://chat-preview.lobehub.com/market>
| Desktop | Mobile |
| :--------------------------------------------: | :-------------------------------------------: |
| ![][market-desktop] | ![][market-mobile] |
| [⚡️ Lighthouse Report][market-desktop-report] | [⚡️ Lighthouse Report][market-mobile-report] |
## Settings 设置页面
> **Info**\ > **Info**\
> <https://chat-preview.lobehub.com/settings> > <https://lobechat.com/discover>
| Desktop | Mobile | | Desktop | Mobile |
| :----------------------------------------------: | :---------------------------------------------: | | :----------------------------------------------: | :---------------------------------------------: |
| ![][settings-desktop] | ![][settings-mobile] | | ![][discover-desktop] | ![][discover-mobile] |
| [⚡️ Lighthouse Report][settings-desktop-report] | [⚡️ Lighthouse Report][settings-mobile-report] | | [⚡️ Lighthouse Report][discover-desktop-report] | [⚡️ Lighthouse Report][discover-mobile-report] |
[chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg [chat-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/desktop/pagespeed.svg
[chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/chat_preview_lobehub_com_chat.html [chat-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/desktop/lobechat_com_chat.html
[chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg [chat-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/chat/mobile/pagespeed.svg
[chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/chat_preview_lobehub_com_chat.html [chat-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/chat/mobile/lobechat_com_chat.html
[market-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/market/desktop/pagespeed.svg [discover-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/desktop/pagespeed.svg
[market-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/market/desktop/chat_preview_lobehub_com_market.html [discover-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/desktop/lobechat_com_discover.html
[market-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/market/mobile/pagespeed.svg [discover-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/discover/mobile/pagespeed.svg
[market-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/market/mobile/chat_preview_lobehub_com_market.html [discover-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/discover/mobile/lobechat_com_discover.html
[settings-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/settings/desktop/pagespeed.svg
[settings-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/settings/desktop/chat_preview_lobehub_com_settings.html
[settings-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/settings/mobile/pagespeed.svg
[settings-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/settings/mobile/chat_preview_lobehub_com_settings.html
[welcome-desktop]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/welcome/desktop/pagespeed.svg
[welcome-desktop-report]: https://lobehub.github.io/lobe-chat/lighthouse/welcome/desktop/chat_preview_lobehub_com_welcome.html
[welcome-mobile]: https://raw.githubusercontent.com/lobehub/lobe-chat/lighthouse/lighthouse/welcome/mobile/pagespeed.svg
[welcome-mobile-report]: https://lobehub.github.io/lobe-chat/lighthouse/welcome/mobile/chat_preview_lobehub_com_welcome.html

@ -1,33 +0,0 @@
# Logto secret
LOGTO_CLIENT_ID=
LOGTO_CLIENT_SECRET=
# MinIO S3 configuration
MINIO_ROOT_USER=YOUR_MINIO_USER
MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD
# Configure the bucket information of MinIO
MINIO_LOBE_BUCKET=lobe
S3_ACCESS_KEY_ID=
S3_SECRET_ACCESS_KEY=
# Proxy, if you need it
# HTTP_PROXY=http://localhost:7890
# HTTPS_PROXY=http://localhost:7890
# Other environment variables, as needed. You can refer to the environment variables configuration for the client version, making sure not to have ACCESS_CODE.
# OPENAI_API_KEY=sk-xxxx
# OPENAI_PROXY_URL=https://api.openai.com/v1
# OPENAI_MODEL_LIST=...
# ----- Other config -----
# if no special requirements, no need to change
LOBE_PORT=3210
LOGTO_PORT=3001
MINIO_PORT=9000
# Postgres related, which are the necessary environment variables for DB
LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC

@ -1,33 +0,0 @@
# Logto secret
LOGTO_CLIENT_ID=
LOGTO_CLIENT_SECRET=
# MinIO S3 配置
MINIO_ROOT_USER=YOUR_MINIO_USER
MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD
# 在下方配置 minio 中添加的桶
MINIO_LOBE_BUCKET=lobe
S3_ACCESS_KEY_ID=
S3_SECRET_ACCESS_KEY=
# Proxy如果你需要的话比如你使用 GitHub 作为鉴权服务提供商)
# HTTP_PROXY=http://localhost:7890
# HTTPS_PROXY=http://localhost:7890
# 其他环境变量,视需求而定,可以参照客户端版本的环境变量配置,注意不要有 ACCESS_CODE
# OPENAI_API_KEY=sk-xxxx
# OPENAI_PROXY_URL=https://api.openai.com/v1
# OPENAI_MODEL_LIST=...
# ----- 相关配置 start -----
# 如没有特殊需要不用更改
LOBE_PORT=3210
LOGTO_PORT=3001
MINIO_PORT=9000
# Postgres 相关,也即 DB 必须的环境变量
LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC

@ -1,102 +0,0 @@
services:
network-service:
image: alpine
container_name: lobe-network
ports:
- '${MINIO_PORT}:${MINIO_PORT}' # MinIO API
- '9001:9001' # MinIO Console
- '${LOGTO_PORT}:${LOGTO_PORT}' # Logto
- '3002:3002' # Logto Admin
- '${LOBE_PORT}:3210' # LobeChat
command: tail -f /dev/null
networks:
- lobe-network
postgresql:
image: pgvector/pgvector:pg16
container_name: lobe-postgres
ports:
- "5432:5432"
volumes:
- './data:/var/lib/postgresql/data'
environment:
- 'POSTGRES_DB=${LOBE_DB_NAME}'
- 'POSTGRES_PASSWORD=${POSTGRES_PASSWORD}'
healthcheck:
test: ['CMD-SHELL', 'pg_isready -U postgres']
interval: 5s
timeout: 5s
retries: 5
restart: always
networks:
- lobe-network
minio:
image: minio/minio
container_name: lobe-minio
network_mode: 'service:network-service'
volumes:
- './s3_data:/etc/minio/data'
environment:
- 'MINIO_ROOT_USER=${MINIO_ROOT_USER}'
- 'MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD}'
- 'MINIO_API_CORS_ALLOW_ORIGIN=http://localhost:${LOBE_PORT}'
restart: always
command: >
server /etc/minio/data --address ":${MINIO_PORT}" --console-address ":9001"
logto:
image: svhd/logto
container_name: lobe-logto
network_mode: 'service:network-service'
depends_on:
postgresql:
condition: service_healthy
environment:
- 'TRUST_PROXY_HEADER=1'
- 'PORT=${LOGTO_PORT}'
- 'DB_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgresql:5432/logto'
- 'ENDPOINT=http://localhost:${LOGTO_PORT}'
- 'ADMIN_ENDPOINT=http://localhost:3002'
entrypoint: ['sh', '-c', 'npm run cli db seed -- --swe && npm start']
lobe:
image: lobehub/lobe-chat-database
container_name: lobe-database
network_mode: 'service:network-service'
depends_on:
postgresql:
condition: service_healthy
network-service:
condition: service_started
minio:
condition: service_started
logto:
condition: service_started
environment:
- 'APP_URL=http://localhost:3210'
- 'NEXT_AUTH_SSO_PROVIDERS=logto'
- 'KEY_VAULTS_SECRET=Kix2wcUONd4CX51E/ZPAd36BqM4wzJgKjPtz2sGztqQ='
- 'NEXT_AUTH_SECRET=NX2kaPE923dt6BL2U8e9oSre5RfoT7hg'
- 'NEXTAUTH_URL=http://localhost:${LOBE_PORT}/api/auth'
- 'LOGTO_ISSUER=http://localhost:${LOGTO_PORT}/oidc'
- 'DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgresql:5432/${LOBE_DB_NAME}'
- 'S3_ENDPOINT=http://localhost:${MINIO_PORT}'
- 'S3_BUCKET=${MINIO_LOBE_BUCKET}'
- 'S3_PUBLIC_DOMAIN=http://localhost:${MINIO_PORT}'
- 'S3_ENABLE_PATH_STYLE=1'
env_file:
- .env
restart: always
volumes:
data:
driver: local
s3_data:
driver: local
networks:
lobe-network:
driver: bridge

@ -16,20 +16,26 @@
LOBE_PORT=3210 LOBE_PORT=3210
CASDOOR_PORT=8000 CASDOOR_PORT=8000
MINIO_PORT=9000 MINIO_PORT=9000
APP_URL=http://localhost:3210
AUTH_URL=http://localhost:3210/api/auth
# Postgres related, which are the necessary environment variables for DB # Postgres related, which are the necessary environment variables for DB
LOBE_DB_NAME=lobechat LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC POSTGRES_PASSWORD=uWNZugjBqixf8dxC
AUTH_CASDOOR_ISSUER=http://localhost:8000
# Casdoor secret # Casdoor secret
AUTH_CASDOOR_ID=a387a4892ee19b1a2249 AUTH_CASDOOR_ID=a387a4892ee19b1a2249
AUTH_CASDOOR_SECRET=dbf205949d704de81b0b5b3603174e23fbecc354 AUTH_CASDOOR_SECRET=dbf205949d704de81b0b5b3603174e23fbecc354
# MinIO S3 configuration # MinIO S3 configuration
MINIO_ROOT_USER=YOUR_MINIO_USER MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD
# Configure the bucket information of MinIO # Configure the bucket information of MinIO
S3_PUBLIC_DOMAIN=http://localhost:9000
S3_ENDPOINT=http://localhost:9000
MINIO_LOBE_BUCKET=lobe MINIO_LOBE_BUCKET=lobe
S3_ACCESS_KEY_ID=soaucnP8Bip0TDdUjxng
S3_SECRET_ACCESS_KEY=ZPUzvY34umfcfxvWKSv0P00vczVMB6YmgJS5J9eO # Configure for casdoor
origin=http://localhost:8000

@ -16,21 +16,26 @@
LOBE_PORT=3210 LOBE_PORT=3210
CASDOOR_PORT=8000 CASDOOR_PORT=8000
MINIO_PORT=9000 MINIO_PORT=9000
APP_URL=http://localhost:3210
AUTH_URL=http://localhost:3210/api/auth
# Postgres 相关,也即 DB 必须的环境变量 # Postgres 相关,也即 DB 必须的环境变量
LOBE_DB_NAME=lobechat LOBE_DB_NAME=lobechat
POSTGRES_PASSWORD=uWNZugjBqixf8dxC POSTGRES_PASSWORD=uWNZugjBqixf8dxC
AUTH_CASDOOR_ISSUER=http://localhost:8000
# Casdoor secret # Casdoor secret
AUTH_CASDOOR_ID=a387a4892ee19b1a2249 AUTH_CASDOOR_ID=a387a4892ee19b1a2249
AUTH_CASDOOR_SECRET=dbf205949d704de81b0b5b3603174e23fbecc354 AUTH_CASDOOR_SECRET=dbf205949d704de81b0b5b3603174e23fbecc354
# MinIO S3 配置 # MinIO S3 配置
MINIO_ROOT_USER=YOUR_MINIO_USER MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD MINIO_ROOT_PASSWORD=YOUR_MINIO_PASSWORD
# 在下方配置 minio 中添加的桶 # 在下方配置 minio 中添加的桶
S3_PUBLIC_DOMAIN=http://localhost:9000
S3_ENDPOINT=http://localhost:9000
MINIO_LOBE_BUCKET=lobe MINIO_LOBE_BUCKET=lobe
S3_ACCESS_KEY_ID=soaucnP8Bip0TDdUjxng
S3_SECRET_ACCESS_KEY=ZPUzvY34umfcfxvWKSv0P00vczVMB6YmgJS5J9eO
# 为 casdoor 配置
origin=http://localhost:8000

