Commit

Merge branch 'main' of https://github.com/lobehub/lobe-chat
actions-user committed Oct 12, 2024
2 parents 9d3505d + 5f4dcab commit 771de50
Showing 7 changed files with 177 additions and 20 deletions.
59 changes: 59 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,65 @@

# Changelog

### [Version 1.21.16](https://github.com/lobehub/lobe-chat/compare/v1.21.15...v1.21.16)

<sup>Released on **2024-10-12**</sup>

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.21.15](https://github.com/lobehub/lobe-chat/compare/v1.21.14...v1.21.15)

<sup>Released on **2024-10-12**</sup>

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.21.14](https://github.com/lobehub/lobe-chat/compare/v1.21.13...v1.21.14)

<sup>Released on **2024-10-12**</sup>

#### 💄 Styles

- **misc**: Fix artifacts render markdown.

<br/>

<details>
<summary><kbd>Improvements and Fixes</kbd></summary>

#### Styles

- **misc**: Fix artifacts render markdown, closes [#4327](https://github.com/lobehub/lobe-chat/issues/4327) ([6bb6ea6](https://github.com/lobehub/lobe-chat/commit/6bb6ea6))

</details>

<div align="right">

[![](https://img.shields.io/badge/-BACK_TO_TOP-151515?style=flat-square)](#readme-top)

</div>

### [Version 1.21.13](https://github.com/lobehub/lobe-chat/compare/v1.21.12...v1.21.13)

<sup>Released on **2024-10-11**</sup>
24 changes: 16 additions & 8 deletions Dockerfile
@@ -33,23 +33,31 @@ RUN \
FROM base AS builder

ARG USE_CN_MIRROR
ARG NEXT_PUBLIC_BASE_PATH
ARG NEXT_PUBLIC_SENTRY_DSN
ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
ARG NEXT_PUBLIC_POSTHOG_HOST
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_ANALYTICS_UMAMI
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID

ENV NEXT_PUBLIC_BASE_PATH=""
ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"

# Sentry
ENV NEXT_PUBLIC_SENTRY_DSN="" \
ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
SENTRY_ORG="" \
SENTRY_PROJECT=""

# Posthog
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \
NEXT_PUBLIC_POSTHOG_HOST="" \
NEXT_PUBLIC_POSTHOG_KEY=""
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"

# Umami
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID=""
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"

# Node
ENV NODE_OPTIONS="--max-old-space-size=8192"
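
With the new `ARG` declarations, the `NEXT_PUBLIC_*` values are no longer hardcoded to empty strings: they can be supplied at image build time and get baked into the client bundle, while args that are left unset still expand to empty strings, matching the old behaviour. A minimal local-build sketch — the tag and values below are placeholders, not part of this commit:

```bash
# Illustrative only: pass any of the declared build args; omitted args expand
# to empty strings, just as before this change.
docker build \
  --build-arg NEXT_PUBLIC_BASE_PATH="/lobe" \
  --build-arg NEXT_PUBLIC_SENTRY_DSN="https://examplePublicKey@o0.ingest.sentry.io/0" \
  -f Dockerfile \
  -t my-lobe-chat:custom .
```
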
29 changes: 20 additions & 9 deletions Dockerfile.database
@@ -33,27 +33,38 @@ RUN \
FROM base AS builder

ARG USE_CN_MIRROR

ENV NEXT_PUBLIC_SERVICE_MODE="server" \
ARG NEXT_PUBLIC_BASE_PATH
ARG NEXT_PUBLIC_SERVICE_MODE
ARG NEXT_PUBLIC_SENTRY_DSN
ARG NEXT_PUBLIC_ANALYTICS_POSTHOG
ARG NEXT_PUBLIC_POSTHOG_HOST
ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_ANALYTICS_UMAMI
ARG NEXT_PUBLIC_UMAMI_SCRIPT_URL
ARG NEXT_PUBLIC_UMAMI_WEBSITE_ID

ENV NEXT_PUBLIC_BASE_PATH="${NEXT_PUBLIC_BASE_PATH}"

ENV NEXT_PUBLIC_SERVICE_MODE="${NEXT_PUBLIC_SERVICE_MODE:-server}" \
APP_URL="http://app.com" \
DATABASE_DRIVER="node" \
DATABASE_URL="postgres://postgres:password@localhost:5432/postgres" \
KEY_VAULTS_SECRET="use-for-build"

# Sentry
ENV NEXT_PUBLIC_SENTRY_DSN="" \
ENV NEXT_PUBLIC_SENTRY_DSN="${NEXT_PUBLIC_SENTRY_DSN}" \
SENTRY_ORG="" \
SENTRY_PROJECT=""

# Posthog
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="" \
NEXT_PUBLIC_POSTHOG_HOST="" \
NEXT_PUBLIC_POSTHOG_KEY=""
ENV NEXT_PUBLIC_ANALYTICS_POSTHOG="${NEXT_PUBLIC_ANALYTICS_POSTHOG}" \
NEXT_PUBLIC_POSTHOG_HOST="${NEXT_PUBLIC_POSTHOG_HOST}" \
NEXT_PUBLIC_POSTHOG_KEY="${NEXT_PUBLIC_POSTHOG_KEY}"

# Umami
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID=""
ENV NEXT_PUBLIC_ANALYTICS_UMAMI="${NEXT_PUBLIC_ANALYTICS_UMAMI}" \
NEXT_PUBLIC_UMAMI_SCRIPT_URL="${NEXT_PUBLIC_UMAMI_SCRIPT_URL}" \
NEXT_PUBLIC_UMAMI_WEBSITE_ID="${NEXT_PUBLIC_UMAMI_WEBSITE_ID}"

# Node
ENV NODE_OPTIONS="--max-old-space-size=8192"
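
Dockerfile.database gains the same `ARG` plumbing, plus a shell-style default: `${NEXT_PUBLIC_SERVICE_MODE:-server}` keeps the image in `server` mode whenever that build arg is omitted. A rough sketch of a local build — the tag and base path are placeholders:

```bash
# Illustrative only: NEXT_PUBLIC_SERVICE_MODE is omitted here, so the
# ":-server" default applies and the image builds in server mode.
docker build \
  --build-arg NEXT_PUBLIC_BASE_PATH="/chat" \
  -f Dockerfile.database \
  -t my-lobe-chat-db:custom .
```
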
71 changes: 71 additions & 0 deletions docs/self-hosting/environment-variables.mdx
@@ -21,3 +21,74 @@ LobeChat provides some additional configuration options when deployed, which can
<Cards href={'environment-variables/s3'} title={'S3 Storage Service'} />
<Cards href={'environment-variables/analytics'} title={'Data Analytics'} />
</Cards>

## Building a Custom Image with Overridden `NEXT_PUBLIC` Variables

`NEXT_PUBLIC_*` variables are inlined into the client bundle at build time, so they cannot be overridden at runtime on a prebuilt image. If you need different values, you can build a custom Docker image with GitHub Actions
without forking the entire LobeChat repository. Here is how:

1. Create a new GitHub repository for your custom build.

2. In your new repository, create a `.github/workflows` directory.

3. Inside the `.github/workflows` directory, create a file named `build-custom-lobe.yml`:
```yaml
name: Build Custom Image

on:
  workflow_dispatch: # Manual trigger

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository_owner }}/lobe-chat-database # Name of your image

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          repository: lobehub/lobe-chat

      - name: Log in to the Container registry
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          file: Dockerfile.database # Change dockerfile if needed
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          # List all variables you need to overwrite
          build-args: |
            NEXT_PUBLIC_BASE_PATH=${{ secrets.NEXT_PUBLIC_BASE_PATH }}
            NEXT_PUBLIC_SERVICE_MODE=${{ secrets.NEXT_PUBLIC_SERVICE_MODE }}
```
4. In your GitHub repository settings > Secrets and variables > Actions > Repository secrets, add any `NEXT_PUBLIC_*` variables you want to override; each secret name must match the build arg referenced in the workflow (e.g. `NEXT_PUBLIC_BASE_PATH`).
5. Set "Read and write" permissions for workflows in Repository settings > Actions > General > Workflow permissions.
6. To build your custom image, go to the "Actions" tab of your repository and manually trigger the "Build Custom Image" workflow.

This approach lets you produce a build with your desired `NEXT_PUBLIC_*` values without maintaining a full fork of the LobeChat repository. Trigger a new build whenever you need to update your custom image.
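
Once the workflow finishes, the image is published to GHCR under your account. The tag below is an assumption (docker/metadata-action typically tags default-branch builds as `main`), so check your repository's package page for the exact tag; `<owner>` is a placeholder, the run command assumes LobeChat's default port of 3210, and `.env` stands in for your own runtime configuration:

```bash
# Illustrative only — replace <owner> with your GitHub user/org and the tag
# with whatever the workflow actually published.
docker pull ghcr.io/<owner>/lobe-chat-database:main
docker run -d -p 3210:3210 --env-file .env ghcr.io/<owner>/lobe-chat-database:main
```
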
2 changes: 1 addition & 1 deletion netlify.toml
@@ -3,7 +3,7 @@ command = "pnpm run build"
publish = ".next"

[build.environment]
NODE_OPTIONS = "--max_old_space_size=8192"
NODE_OPTIONS = "--max_old_space_size=4096"

[template.environment]
OPENAI_API_KEY = "set your OpenAI API Key"
4 changes: 2 additions & 2 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@lobehub/chat",
"version": "1.21.13",
"version": "1.21.16",
"description": "Lobe Chat - an open-source, high-performance chatbot framework that supports speech synthesis, multimodal, and extensible Function Call plugin system. Supports one-click free deployment of your private ChatGPT/LLM web application.",
"keywords": [
"framework",
@@ -163,7 +163,7 @@
"jose": "^5.7.0",
"js-sha256": "^0.11.0",
"jsonl-parse-stringify": "^1.0.3",
"langchain": "^0.2.17",
"langchain": "^0.3.0",
"langfuse": "^3.19.0",
"langfuse-core": "^3.19.0",
"lodash-es": "^4.17.21",
8 changes: 8 additions & 0 deletions src/config/modelProviders/siliconcloud.ts
@@ -7,6 +7,7 @@ const SiliconCloud: ModelProviderCard = {
description: 'DeepSeek V2.5 集合了先前版本的优秀特征,增强了通用和编码能力。',
displayName: 'DeepSeek V2.5',
enabled: true,
functionCall: true,
id: 'deepseek-ai/DeepSeek-V2.5',
pricing: {
currency: 'CNY',
@@ -19,6 +20,7 @@ const SiliconCloud: ModelProviderCard = {
description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
displayName: 'Qwen2.5 7B',
enabled: true,
functionCall: true,
id: 'Qwen/Qwen2.5-7B-Instruct',
pricing: {
currency: 'CNY',
@@ -30,6 +32,7 @@ const SiliconCloud: ModelProviderCard = {
{
description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
displayName: 'Qwen2.5 14B',
functionCall: true,
id: 'Qwen/Qwen2.5-14B-Instruct',
pricing: {
currency: 'CNY',
@@ -41,6 +44,7 @@ const SiliconCloud: ModelProviderCard = {
{
description: 'Qwen2.5 是全新的大型语言模型系列,旨在优化指令式任务的处理。',
displayName: 'Qwen2.5 32B',
functionCall: true,
id: 'Qwen/Qwen2.5-32B-Instruct',
pricing: {
currency: 'CNY',
@@ -53,6 +57,7 @@ const SiliconCloud: ModelProviderCard = {
description: 'Qwen2.5 是全新的大型语言模型系列,具有更强的理解和生成能力。',
displayName: 'Qwen2.5 72B',
enabled: true,
functionCall: true,
id: 'Qwen/Qwen2.5-72B-Instruct',
pricing: {
currency: 'CNY',
@@ -88,6 +93,7 @@ const SiliconCloud: ModelProviderCard = {
{
description: 'InternLM2.5 提供多场景下的智能对话解决方案。',
displayName: 'Internlm 2.5 7B',
functionCall: true,
id: 'internlm/internlm2_5-7b-chat',
pricing: {
currency: 'CNY',
@@ -99,6 +105,7 @@ const SiliconCloud: ModelProviderCard = {
{
description: '创新的开源模型InternLM2.5,通过大规模的参数提高了对话智能。',
displayName: 'Internlm 2.5 20B',
functionCall: true,
id: 'internlm/internlm2_5-20b-chat',
pricing: {
currency: 'CNY',
@@ -110,6 +117,7 @@ const SiliconCloud: ModelProviderCard = {
{
description: 'GLM-4 9B 开放源码版本,为会话应用提供优化后的对话体验。',
displayName: 'GLM-4 9B',
functionCall: true,
id: 'THUDM/glm-4-9b-chat',
pricing: {
currency: 'CNY',
