Mirror of https://github.com/stackblitz-labs/bolt.diy.git (synced 2025-06-18 01:49:07 +01:00)

Commit 9fd5f149c9: Merge branch 'stackblitz-labs:main' into supabase

.github/workflows/docker.yaml (vendored), 43 lines changed
@@ -2,11 +2,14 @@ name: Docker Publish
on:
push:
branches:
- main
- stable
branches: [main, stable]
tags: ['v*', '*.*.*']
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions:
packages: write
contents: read
@@ -18,13 +21,14 @@ env:
jobs:
docker-build-publish:
runs-on: ubuntu-latest
# timeout-minutes: 30
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
@@ -37,25 +41,22 @@ jobs:
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=raw,value=stable,enable=${{ github.ref == 'refs/heads/stable' }}
type=ref,event=tag
type=sha,format=short
type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || github.ref == 'refs/heads/stable' }}

- name: Build and push Docker image for main
if: github.ref == 'refs/heads/main'
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
context: .
platforms: linux/amd64,linux/arm64
target: bolt-ai-production
push: true
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
labels: ${{ steps.meta.outputs.labels }}

- name: Build and push Docker image for stable
if: github.ref == 'refs/heads/stable'
uses: docker/build-push-action@v6
with:
context: .
push: true
tags: |
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:stable
${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

- name: Check manifest
run: docker buildx imagetools inspect ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.meta.outputs.version }}
.github/workflows/electron.yml (vendored, new file), 91 lines

@@ -0,0 +1,91 @@
name: Electron Build and Release

on:
  workflow_dispatch:
  push:
    branches:
      - electron
    tags:
      - 'v*'

jobs:
  build:
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: [macos-latest, ubuntu-latest, windows-latest]
        node-version: [18.18.0]
      fail-fast: false

    steps:
      - name: Check out Git repository
        uses: actions/checkout@v4

      - name: Install Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 9.14.4
          run_install: false

      - name: Get pnpm store directory
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

      - name: Setup pnpm cache
        uses: actions/cache@v3
        with:
          path: ${{ env.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: pnpm install

      # Install Linux dependencies
      - name: Install Linux dependencies
        if: matrix.os == 'ubuntu-latest'
        run: |
          sudo apt-get update
          sudo apt-get install -y rpm

      # Build
      - name: Build Electron app
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CSC_LINK: ${{ secrets.CSC_LINK }}
          CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
          APPLE_ID: ${{ secrets.APPLE_ID }}
          APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
          APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
          NODE_OPTIONS: "--max_old_space_size=4096"
        run: |
          if [ "$RUNNER_OS" == "Windows" ]; then
            pnpm run electron:build:win
          elif [ "$RUNNER_OS" == "macOS" ]; then
            pnpm run electron:build:mac
          else
            pnpm run electron:build:linux
          fi
        shell: bash

      # Create Release
      - name: Create Release
        uses: softprops/action-gh-release@v2
        with:
          draft: ${{ github.ref_type == 'branch' }}
          files: |
            dist/*.exe
            dist/*.dmg
            dist/*.deb
            dist/*.AppImage
            dist/*.zip
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -1,2 +0,0 @@
nodejs 20.15.1
pnpm 9.4.0
README.md, 43 lines changed

@@ -1,8 +1,8 @@
# bolt.diy (Previously oTToDev)
# bolt.diy

[](https://bolt.diy)

Welcome to bolt.diy, the official open source version of Bolt.new (previously known as oTToDev and bolt.new ANY LLM), which allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
Welcome to bolt.diy, the official open source version of Bolt.new, which allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.

-----
Check the [bolt.diy Docs](https://stackblitz-labs.github.io/bolt.diy/) for official installation instructions and more information.
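The paragraph above notes that bolt.diy can be extended to any model supported by the Vercel AI SDK. As a rough, hypothetical sketch (not part of this diff), a new provider follows the same shape that the AnthropicProvider changes further down in this commit show: a class extending the project's BaseProvider with a `staticModels` list and a `getModelInstance` factory. The import paths, the `config.apiTokenKey` field, and the `createOpenAI` wrapper are assumptions based on those diffs and on the public AI SDK, not verbatim project code.

```ts
// Hypothetical provider sketch; module paths and the BaseProvider API are
// assumed from the provider diffs in this commit.
import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { IProviderSetting } from '~/types/model';
import type { LanguageModelV1 } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';

export default class ExampleProvider extends BaseProvider {
  name = 'Example';
  getApiKeyLink = 'https://example.com/api-keys'; // placeholder URL

  config = {
    apiTokenKey: 'EXAMPLE_API_KEY', // hypothetical env var
  };

  staticModels: ModelInfo[] = [
    { name: 'example-large', label: 'Example Large', provider: 'Example', maxTokenAllowed: 8000 },
  ];

  getModelInstance: (options: {
    model: string;
    serverEnv?: Record<string, string>;
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
  }) => LanguageModelV1 = (options) => {
    const { model, serverEnv, apiKeys, providerSettings } = options;

    // Same helper the Anthropic/Ollama providers use in the diffs below.
    const { apiKey } = this.getProviderBaseUrlAndKey({
      apiKeys,
      providerSettings: providerSettings?.[this.name],
      serverEnv: serverEnv as any,
      defaultBaseUrlKey: '',
      defaultApiTokenKey: 'EXAMPLE_API_KEY',
    });

    if (!apiKey) {
      throw new Error(`Missing API key for ${this.name} provider`);
    }

    // Any OpenAI-compatible endpoint can be wrapped via the Vercel AI SDK.
    const provider = createOpenAI({ apiKey, baseURL: 'https://api.example.com/v1' });

    return provider(model);
  };
}
```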
@@ -83,7 +83,8 @@ project, please check the [project management guide](./PROJECT.md) to get starte
- ⬜ **HIGH PRIORITY** - Prevent bolt from rewriting files as often (file locking and diffs)
- ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
- ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
- ⬜ Deploy directly to Vercel/Netlify/other similar platforms
- ✅ Deploy directly to Netlify (@xKevIsDev)
- ⬜ Supabase Integration
- ⬜ Have LLM plan the project in an MD file for better results/transparency
- ⬜ VSCode Integration with git-like confirmations
- ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
@@ -101,8 +102,9 @@ project, please check the [project management guide](./PROJECT.md) to get starte
- **Attach images to prompts** for better contextual understanding.
- **Integrated terminal** to view output of LLM-run commands.
- **Revert code to earlier versions** for easier debugging and quicker changes.
- **Download projects as ZIP** for easy portability.
- **Download projects as ZIP** for easy portability, and sync to a folder on the host.
- **Integration-ready Docker support** for a hassle-free setup.
- **Deploy** directly to **Netlify**.

## Setup
@@ -241,8 +243,7 @@ This method is recommended for developers who want to:

1. **Clone the Repository**:

```bash
# Using HTTPS
git clone https://github.com/stackblitz-labs/bolt.diy.git
git clone -b stable https://github.com/stackblitz-labs/bolt.diy.git
```

2. **Navigate to Project Directory**:

@@ -251,21 +252,31 @@ This method is recommended for developers who want to:
cd bolt.diy
```

3. **Switch to the Main Branch**:
```bash
git checkout main
```
4. **Install Dependencies**:
3. **Install Dependencies**:

```bash
pnpm install
```

5. **Start the Development Server**:
4. **Start the Development Server**:
```bash
pnpm run dev
```

5. **(OPTIONAL)** Switch to the main branch if you want to use the pre-release/test branch:
```bash
git checkout main
pnpm install
pnpm run dev
```
Hint: Be aware that the main branch can contain beta features and is more likely to have bugs than the stable release.

> **Open the WebUI to test (Default: http://localhost:5173)**
> - Beginners:
>   - Try to use a sophisticated provider/model like Anthropic with the Claude Sonnet 3.x models to get the best results.
>   - Explanation: The system prompt currently implemented in bolt.diy can't deliver the best performance for every provider and model out there, so it works better with some models than with others, even if the models themselves are well suited for programming.
>   - Future: A plugin/extensions library is planned so that different system prompts can be used for different models, which will help to get better results.
#### Staying Updated

To get the latest changes from the repository:

@@ -279,7 +290,7 @@ To get the latest changes from the repository:

2. **Pull Latest Updates**:

```bash
git pull origin main
git pull
```

3. **Update Dependencies**:

@@ -349,3 +360,9 @@ Explore upcoming features and priorities on our [Roadmap](https://roadmap.sh/r/o

## FAQ

For answers to common questions, issues, and to see a list of recommended models, visit our [FAQ Page](FAQ.md).


# Licensing
**Who needs a commercial WebContainer API license?**

bolt.diy source code is distributed as MIT, but it uses the WebContainers API, which [requires licensing](https://webcontainers.io/enterprise) for production usage in a commercial, for-profit setting. (Prototypes or POCs do not require a commercial license.) If you're using the API to meet the needs of your customers, prospective customers, and/or employees, you need a license to ensure compliance with our Terms of Service. Usage of the API in violation of these terms may result in your access being revoked.
@@ -1,6 +1,19 @@
import { motion } from 'framer-motion';
import { GithubConnection } from './GithubConnection';
import { NetlifyConnection } from './NetlifyConnection';
import React, { Suspense } from 'react';

// Use React.lazy for dynamic imports
const GithubConnection = React.lazy(() => import('./GithubConnection'));
const NetlifyConnection = React.lazy(() => import('./NetlifyConnection'));

// Loading fallback component
const LoadingFallback = () => (
<div className="p-4 bg-white dark:bg-[#0A0A0A] rounded-lg border border-[#E5E5E5] dark:border-[#1A1A1A]">
<div className="flex items-center gap-2 text-bolt-elements-textSecondary">
<div className="i-ph:spinner-gap w-5 h-5 animate-spin" />
<span>Loading connection...</span>
</div>
</div>
);

export default function ConnectionsTab() {
return (
@@ -20,8 +33,12 @@ export default function ConnectionsTab() {
</p>

<div className="grid grid-cols-1 gap-4">
<GithubConnection />
<NetlifyConnection />
<Suspense fallback={<LoadingFallback />}>
<GithubConnection />
</Suspense>
<Suspense fallback={<LoadingFallback />}>
<NetlifyConnection />
</Suspense>
</div>
</div>
);
@ -66,7 +66,7 @@ interface GitHubConnection {
|
||||
stats?: GitHubStats;
|
||||
}
|
||||
|
||||
export function GithubConnection() {
|
||||
export default function GithubConnection() {
|
||||
const [connection, setConnection] = useState<GitHubConnection>({
|
||||
user: null,
|
||||
token: '',
|
||||
@ -77,6 +77,46 @@ export function GithubConnection() {
|
||||
const [isFetchingStats, setIsFetchingStats] = useState(false);
|
||||
const [isStatsExpanded, setIsStatsExpanded] = useState(false);
|
||||
|
||||
const fetchGithubUser = async (token: string) => {
|
||||
try {
|
||||
setIsConnecting(true);
|
||||
|
||||
const response = await fetch('https://api.github.com/user', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Invalid token or unauthorized');
|
||||
}
|
||||
|
||||
const data = (await response.json()) as GitHubUserResponse;
|
||||
const newConnection: GitHubConnection = {
|
||||
user: data,
|
||||
token,
|
||||
tokenType: connection.tokenType,
|
||||
};
|
||||
|
||||
localStorage.setItem('github_connection', JSON.stringify(newConnection));
|
||||
Cookies.set('githubToken', token);
|
||||
Cookies.set('githubUsername', data.login);
|
||||
Cookies.set('git:github.com', JSON.stringify({ username: token, password: 'x-oauth-basic' }));
|
||||
|
||||
setConnection(newConnection);
|
||||
|
||||
await fetchGitHubStats(token);
|
||||
|
||||
toast.success('Successfully connected to GitHub');
|
||||
} catch (error) {
|
||||
logStore.logError('Failed to authenticate with GitHub', { error });
|
||||
toast.error('Failed to connect to GitHub');
|
||||
setConnection({ user: null, token: '', tokenType: 'classic' });
|
||||
} finally {
|
||||
setIsConnecting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const fetchGitHubStats = async (token: string) => {
|
||||
try {
|
||||
setIsFetchingStats(true);
|
||||
@ -182,51 +222,25 @@ export function GithubConnection() {
|
||||
|
||||
setIsLoading(false);
|
||||
}, []);
|
||||
useEffect(() => {
|
||||
if (!connection) {
|
||||
return;
|
||||
}
|
||||
|
||||
const token = connection.token;
|
||||
const data = connection.user;
|
||||
Cookies.set('githubToken', token);
|
||||
Cookies.set('git:github.com', JSON.stringify({ username: token, password: 'x-oauth-basic' }));
|
||||
|
||||
if (data) {
|
||||
Cookies.set('githubUsername', data.login);
|
||||
}
|
||||
}, [connection]);
|
||||
|
||||
if (isLoading || isConnecting || isFetchingStats) {
|
||||
return <LoadingSpinner />;
|
||||
}
|
||||
|
||||
const fetchGithubUser = async (token: string) => {
|
||||
try {
|
||||
setIsConnecting(true);
|
||||
|
||||
const response = await fetch('https://api.github.com/user', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error('Invalid token or unauthorized');
|
||||
}
|
||||
|
||||
const data = (await response.json()) as GitHubUserResponse;
|
||||
const newConnection: GitHubConnection = {
|
||||
user: data,
|
||||
token,
|
||||
tokenType: connection.tokenType,
|
||||
};
|
||||
|
||||
localStorage.setItem('github_connection', JSON.stringify(newConnection));
|
||||
Cookies.set('githubToken', token);
|
||||
Cookies.set('githubUsername', data.login);
|
||||
Cookies.set('git:github.com', JSON.stringify({ username: token, password: 'x-oauth-basic' }));
|
||||
|
||||
setConnection(newConnection);
|
||||
|
||||
await fetchGitHubStats(token);
|
||||
|
||||
toast.success('Successfully connected to GitHub');
|
||||
} catch (error) {
|
||||
logStore.logError('Failed to authenticate with GitHub', { error });
|
||||
toast.error('Failed to connect to GitHub');
|
||||
setConnection({ user: null, token: '', tokenType: 'classic' });
|
||||
} finally {
|
||||
setIsConnecting(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleConnect = async (event: React.FormEvent) => {
|
||||
event.preventDefault();
|
||||
await fetchGithubUser(connection.token);
|
||||
|
@@ -13,7 +13,7 @@ import {
} from '~/lib/stores/netlify';
import type { NetlifyUser } from '~/types/netlify';

export function NetlifyConnection() {
export default function NetlifyConnection() {
const connection = useStore(netlifyConnection);
const connecting = useStore(isConnecting);
const fetchingStats = useStore(isFetchingStats);
@@ -292,11 +292,24 @@ export function RepositorySelectionDialog({ isOpen, onClose, onSelect }: Reposit

const connection = getLocalStorage('github_connection');
const headers: HeadersInit = connection?.token ? { Authorization: `Bearer ${connection.token}` } : {};

// Fetch repository tree
const treeResponse = await fetch(`https://api.github.com/repos/${owner}/${repo}/git/trees/main?recursive=1`, {
const repoObjResponse = await fetch(`https://api.github.com/repos/${owner}/${repo}`, {
headers,
});
const repoObjData = (await repoObjResponse.json()) as any;

if (!repoObjData.default_branch) {
throw new Error('Failed to fetch repository branch');
}

const defaultBranch = repoObjData.default_branch;

// Fetch repository tree
const treeResponse = await fetch(
`https://api.github.com/repos/${owner}/${repo}/git/trees/${defaultBranch}?recursive=1`,
{
headers,
},
);

if (!treeResponse.ok) {
throw new Error('Failed to fetch repository structure');
@@ -1353,7 +1353,9 @@ export default function DebugTab() {
</div>
<div className="text-xs text-bolt-elements-textSecondary mt-2 flex items-center gap-1.5">
<div className="i-ph:code w-3.5 h-3.5 text-purple-500" />
DOM Ready: {systemInfo ? (systemInfo.performance.timing.domReadyTime / 1000).toFixed(2) : '-'}s
DOM Ready: {systemInfo
? (systemInfo.performance.timing.domReadyTime / 1000).toFixed(2)
: '-'}s
</div>
</div>
@ -323,7 +323,16 @@ export const ChatImpl = memo(
|
||||
{
|
||||
id: `1-${new Date().getTime()}`,
|
||||
role: 'user',
|
||||
content: messageContent,
|
||||
content: [
|
||||
{
|
||||
type: 'text',
|
||||
text: `[Model: ${model}]\n\n[Provider: ${provider.name}]\n\n${messageContent}`,
|
||||
},
|
||||
...imageDataList.map((imageData) => ({
|
||||
type: 'image',
|
||||
image: imageData,
|
||||
})),
|
||||
] as any,
|
||||
},
|
||||
{
|
||||
id: `2-${new Date().getTime()}`,
|
||||
@ -338,6 +347,15 @@ export const ChatImpl = memo(
|
||||
},
|
||||
]);
|
||||
reload();
|
||||
setInput('');
|
||||
Cookies.remove(PROMPT_COOKIE_KEY);
|
||||
|
||||
setUploadedFiles([]);
|
||||
setImageDataList([]);
|
||||
|
||||
resetEnhancer();
|
||||
|
||||
textareaRef.current?.blur();
|
||||
setFakeLoading(false);
|
||||
|
||||
return;
|
||||
@ -364,6 +382,15 @@ export const ChatImpl = memo(
|
||||
]);
|
||||
reload();
|
||||
setFakeLoading(false);
|
||||
setInput('');
|
||||
Cookies.remove(PROMPT_COOKIE_KEY);
|
||||
|
||||
setUploadedFiles([]);
|
||||
setImageDataList([]);
|
||||
|
||||
resetEnhancer();
|
||||
|
||||
textareaRef.current?.blur();
|
||||
|
||||
return;
|
||||
}
|
||||
|
@@ -41,7 +41,7 @@ export function useShortcuts(): void {
}

// Debug logging in development only
if (process.env.NODE_ENV === 'development') {
if (import.meta.env.DEV) {
console.log('Key pressed:', {
key: event.key,
code: event.code,
@@ -13,6 +13,12 @@ export default class AnthropicProvider extends BaseProvider {
};

staticModels: ModelInfo[] = [
{
name: 'claude-3-7-sonnet-20250219',
label: 'Claude 3.7 Sonnet',
provider: 'Anthropic',
maxTokenAllowed: 8000,
},
{
name: 'claude-3-5-sonnet-latest',
label: 'Claude 3.5 Sonnet (new)',
@@ -46,7 +52,7 @@ export default class AnthropicProvider extends BaseProvider {
providerSettings: settings,
serverEnv: serverEnv as any,
defaultBaseUrlKey: '',
defaultApiTokenKey: 'OPENAI_API_KEY',
defaultApiTokenKey: 'ANTHROPIC_API_KEY',
});

if (!apiKey) {
@@ -75,7 +75,7 @@ export default class LMStudioProvider extends BaseProvider {
throw new Error('No baseUrl found for LMStudio provider');
}

const isDocker = process.env.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';

if (typeof window === 'undefined') {
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
@ -27,8 +27,6 @@ export interface OllamaApiResponse {
|
||||
models: OllamaModel[];
|
||||
}
|
||||
|
||||
export const DEFAULT_NUM_CTX = process?.env?.DEFAULT_NUM_CTX ? parseInt(process.env.DEFAULT_NUM_CTX, 10) : 32768;
|
||||
|
||||
export default class OllamaProvider extends BaseProvider {
|
||||
name = 'Ollama';
|
||||
getApiKeyLink = 'https://ollama.com/download';
|
||||
@ -41,6 +39,26 @@ export default class OllamaProvider extends BaseProvider {
|
||||
|
||||
staticModels: ModelInfo[] = [];
|
||||
|
||||
private _convertEnvToRecord(env?: Env): Record<string, string> {
|
||||
if (!env) {
|
||||
return {};
|
||||
}
|
||||
|
||||
// Convert Env to a plain object with string values
|
||||
return Object.entries(env).reduce(
|
||||
(acc, [key, value]) => {
|
||||
acc[key] = String(value);
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, string>,
|
||||
);
|
||||
}
|
||||
|
||||
getDefaultNumCtx(serverEnv?: Env): number {
|
||||
const envRecord = this._convertEnvToRecord(serverEnv);
|
||||
return envRecord.DEFAULT_NUM_CTX ? parseInt(envRecord.DEFAULT_NUM_CTX, 10) : 32768;
|
||||
}
|
||||
|
||||
async getDynamicModels(
|
||||
apiKeys?: Record<string, string>,
|
||||
settings?: IProviderSetting,
|
||||
@ -81,6 +99,7 @@ export default class OllamaProvider extends BaseProvider {
|
||||
maxTokenAllowed: 8000,
|
||||
}));
|
||||
}
|
||||
|
||||
getModelInstance: (options: {
|
||||
model: string;
|
||||
serverEnv?: Env;
|
||||
@ -88,10 +107,12 @@ export default class OllamaProvider extends BaseProvider {
|
||||
providerSettings?: Record<string, IProviderSetting>;
|
||||
}) => LanguageModelV1 = (options) => {
|
||||
const { apiKeys, providerSettings, serverEnv, model } = options;
|
||||
const envRecord = this._convertEnvToRecord(serverEnv);
|
||||
|
||||
let { baseUrl } = this.getProviderBaseUrlAndKey({
|
||||
apiKeys,
|
||||
providerSettings: providerSettings?.[this.name],
|
||||
serverEnv: serverEnv as any,
|
||||
serverEnv: envRecord,
|
||||
defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
|
||||
defaultApiTokenKey: '',
|
||||
});
|
||||
@ -101,14 +122,14 @@ export default class OllamaProvider extends BaseProvider {
|
||||
throw new Error('No baseUrl found for OLLAMA provider');
|
||||
}
|
||||
|
||||
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || serverEnv?.RUNNING_IN_DOCKER === 'true';
|
||||
const isDocker = process?.env?.RUNNING_IN_DOCKER === 'true' || envRecord.RUNNING_IN_DOCKER === 'true';
|
||||
baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
|
||||
baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
|
||||
|
||||
logger.debug('Ollama Base Url used: ', baseUrl);
|
||||
|
||||
const ollamaInstance = ollama(model, {
|
||||
numCtx: DEFAULT_NUM_CTX,
|
||||
numCtx: this.getDefaultNumCtx(serverEnv),
|
||||
}) as LanguageModelV1 & { config: any };
|
||||
|
||||
ollamaInstance.config.baseURL = `${baseUrl}/api`;
|
||||
|
app/lib/persistence/types.ts (new file), 7 lines

@@ -0,0 +1,7 @@
import type { FileMap } from '~/lib/stores/files';

export interface Snapshot {
  chatIndex: string;
  files: FileMap;
  summary?: string;
}
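For orientation (a sketch, not part of this commit): the useChatHistory changes below store one Snapshot per chat under a `snapshot:<chatId>` localStorage key and read it back when the chat is reloaded. A minimal standalone illustration, assuming the Snapshot and FileMap types above; the helper names are hypothetical:

```ts
import type { Snapshot } from '~/lib/persistence/types';
import type { FileMap } from '~/lib/stores/files';

// Persist a snapshot for a chat (mirrors takeSnapshot() in useChatHistory below).
function saveSnapshot(chatId: string, chatIndex: string, files: FileMap, summary?: string): void {
  const snapshot: Snapshot = { chatIndex, files, summary };
  localStorage.setItem(`snapshot:${chatId}`, JSON.stringify(snapshot));
}

// Read a snapshot back, falling back to an empty one when nothing was stored.
function loadSnapshot(chatId: string): Snapshot {
  const raw = localStorage.getItem(`snapshot:${chatId}`);
  return raw ? (JSON.parse(raw) as Snapshot) : { chatIndex: '', files: {} };
}
```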
@ -1,7 +1,7 @@
|
||||
import { useLoaderData, useNavigate, useSearchParams } from '@remix-run/react';
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { atom } from 'nanostores';
|
||||
import type { Message } from 'ai';
|
||||
import { generateId, type JSONValue, type Message } from 'ai';
|
||||
import { toast } from 'react-toastify';
|
||||
import { workbenchStore } from '~/lib/stores/workbench';
|
||||
import { logStore } from '~/lib/stores/logs'; // Import logStore
|
||||
@ -15,6 +15,11 @@ import {
|
||||
createChatFromMessages,
|
||||
type IChatMetadata,
|
||||
} from './db';
|
||||
import type { FileMap } from '~/lib/stores/files';
|
||||
import type { Snapshot } from './types';
|
||||
import { webcontainer } from '~/lib/webcontainer';
|
||||
import { createCommandsMessage, detectProjectCommands } from '~/utils/projectCommands';
|
||||
import type { ContextAnnotation } from '~/types/context';
|
||||
|
||||
export interface ChatHistoryItem {
|
||||
id: string;
|
||||
@ -37,6 +42,7 @@ export function useChatHistory() {
|
||||
const { id: mixedId } = useLoaderData<{ id?: string }>();
|
||||
const [searchParams] = useSearchParams();
|
||||
|
||||
const [archivedMessages, setArchivedMessages] = useState<Message[]>([]);
|
||||
const [initialMessages, setInitialMessages] = useState<Message[]>([]);
|
||||
const [ready, setReady] = useState<boolean>(false);
|
||||
const [urlId, setUrlId] = useState<string | undefined>();
|
||||
@ -56,14 +62,128 @@ export function useChatHistory() {
|
||||
|
||||
if (mixedId) {
|
||||
getMessages(db, mixedId)
|
||||
.then((storedMessages) => {
|
||||
.then(async (storedMessages) => {
|
||||
if (storedMessages && storedMessages.messages.length > 0) {
|
||||
const snapshotStr = localStorage.getItem(`snapshot:${mixedId}`);
|
||||
const snapshot: Snapshot = snapshotStr ? JSON.parse(snapshotStr) : { chatIndex: 0, files: {} };
|
||||
const summary = snapshot.summary;
|
||||
|
||||
const rewindId = searchParams.get('rewindTo');
|
||||
const filteredMessages = rewindId
|
||||
? storedMessages.messages.slice(0, storedMessages.messages.findIndex((m) => m.id === rewindId) + 1)
|
||||
: storedMessages.messages;
|
||||
let startingIdx = -1;
|
||||
const endingIdx = rewindId
|
||||
? storedMessages.messages.findIndex((m) => m.id === rewindId) + 1
|
||||
: storedMessages.messages.length;
|
||||
const snapshotIndex = storedMessages.messages.findIndex((m) => m.id === snapshot.chatIndex);
|
||||
|
||||
if (snapshotIndex >= 0 && snapshotIndex < endingIdx) {
|
||||
startingIdx = snapshotIndex;
|
||||
}
|
||||
|
||||
if (snapshotIndex > 0 && storedMessages.messages[snapshotIndex].id == rewindId) {
|
||||
startingIdx = -1;
|
||||
}
|
||||
|
||||
let filteredMessages = storedMessages.messages.slice(startingIdx + 1, endingIdx);
|
||||
let archivedMessages: Message[] = [];
|
||||
|
||||
if (startingIdx >= 0) {
|
||||
archivedMessages = storedMessages.messages.slice(0, startingIdx + 1);
|
||||
}
|
||||
|
||||
setArchivedMessages(archivedMessages);
|
||||
|
||||
if (startingIdx > 0) {
|
||||
const files = Object.entries(snapshot?.files || {})
|
||||
.map(([key, value]) => {
|
||||
if (value?.type !== 'file') {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
content: value.content,
|
||||
path: key,
|
||||
};
|
||||
})
|
||||
.filter((x) => !!x);
|
||||
const projectCommands = await detectProjectCommands(files);
|
||||
const commands = createCommandsMessage(projectCommands);
|
||||
|
||||
filteredMessages = [
|
||||
{
|
||||
id: generateId(),
|
||||
role: 'user',
|
||||
content: `Restore project from snapshot
|
||||
`,
|
||||
annotations: ['no-store', 'hidden'],
|
||||
},
|
||||
{
|
||||
id: storedMessages.messages[snapshotIndex].id,
|
||||
role: 'assistant',
|
||||
content: ` 📦 Chat Restored from snapshot, You can revert this message to load the full chat history
|
||||
<boltArtifact id="imported-files" title="Project Files Snapshot" type="bundled">
|
||||
${Object.entries(snapshot?.files || {})
|
||||
.filter((x) => !x[0].endsWith('lock.json'))
|
||||
.map(([key, value]) => {
|
||||
if (value?.type === 'file') {
|
||||
return `
|
||||
<boltAction type="file" filePath="${key}">
|
||||
${value.content}
|
||||
</boltAction>
|
||||
`;
|
||||
} else {
|
||||
return ``;
|
||||
}
|
||||
})
|
||||
.join('\n')}
|
||||
</boltArtifact>
|
||||
`,
|
||||
annotations: [
|
||||
'no-store',
|
||||
...(summary
|
||||
? [
|
||||
{
|
||||
chatId: storedMessages.messages[snapshotIndex].id,
|
||||
type: 'chatSummary',
|
||||
summary,
|
||||
} satisfies ContextAnnotation,
|
||||
]
|
||||
: []),
|
||||
],
|
||||
},
|
||||
...(commands !== null
|
||||
? [
|
||||
{
|
||||
id: `${storedMessages.messages[snapshotIndex].id}-2`,
|
||||
role: 'user' as const,
|
||||
content: `setup project`,
|
||||
annotations: ['no-store', 'hidden'],
|
||||
},
|
||||
{
|
||||
...commands,
|
||||
id: `${storedMessages.messages[snapshotIndex].id}-3`,
|
||||
annotations: [
|
||||
'no-store',
|
||||
...(commands.annotations || []),
|
||||
...(summary
|
||||
? [
|
||||
{
|
||||
chatId: `${storedMessages.messages[snapshotIndex].id}-3`,
|
||||
type: 'chatSummary',
|
||||
summary,
|
||||
} satisfies ContextAnnotation,
|
||||
]
|
||||
: []),
|
||||
],
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...filteredMessages,
|
||||
];
|
||||
restoreSnapshot(mixedId);
|
||||
}
|
||||
|
||||
setInitialMessages(filteredMessages);
|
||||
|
||||
setUrlId(storedMessages.urlId);
|
||||
description.set(storedMessages.description);
|
||||
chatId.set(storedMessages.id);
|
||||
@ -75,10 +195,64 @@ export function useChatHistory() {
|
||||
setReady(true);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error(error);
|
||||
|
||||
logStore.logError('Failed to load chat messages', error);
|
||||
toast.error(error.message);
|
||||
});
|
||||
}
|
||||
}, [mixedId]);
|
||||
|
||||
const takeSnapshot = useCallback(
|
||||
async (chatIdx: string, files: FileMap, _chatId?: string | undefined, chatSummary?: string) => {
|
||||
const id = _chatId || chatId;
|
||||
|
||||
if (!id) {
|
||||
return;
|
||||
}
|
||||
|
||||
const snapshot: Snapshot = {
|
||||
chatIndex: chatIdx,
|
||||
files,
|
||||
summary: chatSummary,
|
||||
};
|
||||
localStorage.setItem(`snapshot:${id}`, JSON.stringify(snapshot));
|
||||
},
|
||||
[chatId],
|
||||
);
|
||||
|
||||
const restoreSnapshot = useCallback(async (id: string) => {
|
||||
const snapshotStr = localStorage.getItem(`snapshot:${id}`);
|
||||
const container = await webcontainer;
|
||||
|
||||
// if (snapshotStr)setSnapshot(JSON.parse(snapshotStr));
|
||||
const snapshot: Snapshot = snapshotStr ? JSON.parse(snapshotStr) : { chatIndex: 0, files: {} };
|
||||
|
||||
if (!snapshot?.files) {
|
||||
return;
|
||||
}
|
||||
|
||||
Object.entries(snapshot.files).forEach(async ([key, value]) => {
|
||||
if (key.startsWith(container.workdir)) {
|
||||
key = key.replace(container.workdir, '');
|
||||
}
|
||||
|
||||
if (value?.type === 'folder') {
|
||||
await container.fs.mkdir(key, { recursive: true });
|
||||
}
|
||||
});
|
||||
Object.entries(snapshot.files).forEach(async ([key, value]) => {
|
||||
if (value?.type === 'file') {
|
||||
if (key.startsWith(container.workdir)) {
|
||||
key = key.replace(container.workdir, '');
|
||||
}
|
||||
|
||||
await container.fs.writeFile(key, value.content, { encoding: value.isBinary ? undefined : 'utf8' });
|
||||
} else {
|
||||
}
|
||||
});
|
||||
|
||||
// workbenchStore.files.setKey(snapshot?.files)
|
||||
}, []);
|
||||
|
||||
return {
|
||||
@ -105,14 +279,34 @@ export function useChatHistory() {
|
||||
}
|
||||
|
||||
const { firstArtifact } = workbenchStore;
|
||||
messages = messages.filter((m) => !m.annotations?.includes('no-store'));
|
||||
|
||||
let _urlId = urlId;
|
||||
|
||||
if (!urlId && firstArtifact?.id) {
|
||||
const urlId = await getUrlId(db, firstArtifact.id);
|
||||
|
||||
_urlId = urlId;
|
||||
navigateChat(urlId);
|
||||
setUrlId(urlId);
|
||||
}
|
||||
|
||||
let chatSummary: string | undefined = undefined;
|
||||
const lastMessage = messages[messages.length - 1];
|
||||
|
||||
if (lastMessage.role === 'assistant') {
|
||||
const annotations = lastMessage.annotations as JSONValue[];
|
||||
const filteredAnnotations = (annotations?.filter(
|
||||
(annotation: JSONValue) =>
|
||||
annotation && typeof annotation === 'object' && Object.keys(annotation).includes('type'),
|
||||
) || []) as { type: string; value: any } & { [key: string]: any }[];
|
||||
|
||||
if (filteredAnnotations.find((annotation) => annotation.type === 'chatSummary')) {
|
||||
chatSummary = filteredAnnotations.find((annotation) => annotation.type === 'chatSummary')?.summary;
|
||||
}
|
||||
}
|
||||
|
||||
takeSnapshot(messages[messages.length - 1].id, workbenchStore.files.get(), _urlId, chatSummary);
|
||||
|
||||
if (!description.get() && firstArtifact?.title) {
|
||||
description.set(firstArtifact?.title);
|
||||
}
|
||||
@ -127,7 +321,15 @@ export function useChatHistory() {
|
||||
}
|
||||
}
|
||||
|
||||
await setMessages(db, chatId.get() as string, messages, urlId, description.get(), undefined, chatMetadata.get());
|
||||
await setMessages(
|
||||
db,
|
||||
chatId.get() as string,
|
||||
[...archivedMessages, ...messages],
|
||||
urlId,
|
||||
description.get(),
|
||||
undefined,
|
||||
chatMetadata.get(),
|
||||
);
|
||||
},
|
||||
duplicateCurrentChat: async (listItemId: string) => {
|
||||
if (!db || (!mixedId && !listItemId)) {
|
||||
|
@@ -8,6 +8,7 @@ import { createHead } from 'remix-island';
import { useEffect } from 'react';
import { DndProvider } from 'react-dnd';
import { HTML5Backend } from 'react-dnd-html5-backend';
import { ClientOnly } from 'remix-utils/client-only';

import reactToastifyStyles from 'react-toastify/dist/ReactToastify.css?url';
import globalStyles from './styles/index.scss?url';
@@ -72,11 +73,11 @@ export function Layout({ children }: { children: React.ReactNode }) {
}, [theme]);

return (
<DndProvider backend={HTML5Backend}>
{children}
<>
<ClientOnly>{() => <DndProvider backend={HTML5Backend}>{children}</DndProvider>}</ClientOnly>
<ScrollRestoration />
<Scripts />
</DndProvider>
</>
);
}
@ -1,16 +1,41 @@
|
||||
import type { LoaderFunction } from '@remix-run/cloudflare';
|
||||
import { providerBaseUrlEnvKeys } from '~/utils/constants';
|
||||
import { LLMManager } from '~/lib/modules/llm/manager';
|
||||
import { getApiKeysFromCookie } from '~/lib/api/cookies';
|
||||
|
||||
export const loader: LoaderFunction = async ({ context, request }) => {
|
||||
const url = new URL(request.url);
|
||||
const provider = url.searchParams.get('provider');
|
||||
|
||||
if (!provider || !providerBaseUrlEnvKeys[provider].apiTokenKey) {
|
||||
if (!provider) {
|
||||
return Response.json({ isSet: false });
|
||||
}
|
||||
|
||||
const envVarName = providerBaseUrlEnvKeys[provider].apiTokenKey;
|
||||
const isSet = !!(process.env[envVarName] || (context?.cloudflare?.env as Record<string, any>)?.[envVarName]);
|
||||
const llmManager = LLMManager.getInstance(context?.cloudflare?.env as any);
|
||||
const providerInstance = llmManager.getProvider(provider);
|
||||
|
||||
if (!providerInstance || !providerInstance.config.apiTokenKey) {
|
||||
return Response.json({ isSet: false });
|
||||
}
|
||||
|
||||
const envVarName = providerInstance.config.apiTokenKey;
|
||||
|
||||
// Get API keys from cookie
|
||||
const cookieHeader = request.headers.get('Cookie');
|
||||
const apiKeys = getApiKeysFromCookie(cookieHeader);
|
||||
|
||||
/*
|
||||
* Check API key in order of precedence:
|
||||
* 1. Client-side API keys (from cookies)
|
||||
* 2. Server environment variables (from Cloudflare env)
|
||||
* 3. Process environment variables (from .env.local)
|
||||
* 4. LLMManager environment variables
|
||||
*/
|
||||
const isSet = !!(
|
||||
apiKeys?.[provider] ||
|
||||
(context?.cloudflare?.env as Record<string, any>)?.[envVarName] ||
|
||||
process.env[envVarName] ||
|
||||
llmManager.env[envVarName]
|
||||
);
|
||||
|
||||
return Response.json({ isSet });
|
||||
};
|
||||
|
@ -1,5 +1,4 @@
|
||||
import { type ActionFunctionArgs, json } from '@remix-run/cloudflare';
|
||||
import crypto from 'crypto';
|
||||
import type { NetlifySiteInfo } from '~/types/netlify';
|
||||
|
||||
interface DeployRequestBody {
|
||||
@ -8,6 +7,15 @@ interface DeployRequestBody {
|
||||
chatId: string;
|
||||
}
|
||||
|
||||
async function sha1(message: string) {
|
||||
const msgBuffer = new TextEncoder().encode(message);
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-1', msgBuffer);
|
||||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||
const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
||||
|
||||
return hashHex;
|
||||
}
|
||||
|
||||
export async function action({ request }: ActionFunctionArgs) {
|
||||
try {
|
||||
const { siteId, files, token, chatId } = (await request.json()) as DeployRequestBody & { token: string };
|
||||
@ -104,7 +112,7 @@ export async function action({ request }: ActionFunctionArgs) {
|
||||
for (const [filePath, content] of Object.entries(files)) {
|
||||
// Ensure file path starts with a forward slash
|
||||
const normalizedPath = filePath.startsWith('/') ? filePath : '/' + filePath;
|
||||
const hash = crypto.createHash('sha1').update(content).digest('hex');
|
||||
const hash = await sha1(content);
|
||||
fileDigests[normalizedPath] = hash;
|
||||
}
|
||||
|
||||
|
@ -95,24 +95,28 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
|
||||
},
|
||||
});
|
||||
|
||||
// Handle streaming errors in a non-blocking way
|
||||
(async () => {
|
||||
for await (const part of result.fullStream) {
|
||||
if (part.type === 'error') {
|
||||
const error: any = part.error;
|
||||
logger.error(error);
|
||||
|
||||
return;
|
||||
try {
|
||||
for await (const part of result.fullStream) {
|
||||
if (part.type === 'error') {
|
||||
const error: any = part.error;
|
||||
logger.error('Streaming error:', error);
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error processing stream:', error);
|
||||
}
|
||||
})();
|
||||
|
||||
// Return the text stream directly since it's already text data
|
||||
return new Response(result.textStream, {
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
Connection: 'keep-alive',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Text-Encoding': 'chunked',
|
||||
},
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
|
@ -1,6 +1,47 @@
|
||||
import { json } from '@remix-run/cloudflare';
|
||||
import type { ActionFunctionArgs, LoaderFunctionArgs } from '@remix-run/cloudflare';
|
||||
|
||||
// Allowed headers to forward to the target server
|
||||
const ALLOW_HEADERS = [
|
||||
'accept-encoding',
|
||||
'accept-language',
|
||||
'accept',
|
||||
'access-control-allow-origin',
|
||||
'authorization',
|
||||
'cache-control',
|
||||
'connection',
|
||||
'content-length',
|
||||
'content-type',
|
||||
'dnt',
|
||||
'pragma',
|
||||
'range',
|
||||
'referer',
|
||||
'user-agent',
|
||||
'x-authorization',
|
||||
'x-http-method-override',
|
||||
'x-requested-with',
|
||||
];
|
||||
|
||||
// Headers to expose from the target server's response
|
||||
const EXPOSE_HEADERS = [
|
||||
'accept-ranges',
|
||||
'age',
|
||||
'cache-control',
|
||||
'content-length',
|
||||
'content-language',
|
||||
'content-type',
|
||||
'date',
|
||||
'etag',
|
||||
'expires',
|
||||
'last-modified',
|
||||
'pragma',
|
||||
'server',
|
||||
'transfer-encoding',
|
||||
'vary',
|
||||
'x-github-request-id',
|
||||
'x-redirected-url',
|
||||
];
|
||||
|
||||
// Handle all HTTP methods
|
||||
export async function action({ request, params }: ActionFunctionArgs) {
|
||||
return handleProxyRequest(request, params['*']);
|
||||
@ -16,50 +57,121 @@ async function handleProxyRequest(request: Request, path: string | undefined) {
|
||||
return json({ error: 'Invalid proxy URL format' }, { status: 400 });
|
||||
}
|
||||
|
||||
const url = new URL(request.url);
|
||||
|
||||
// Reconstruct the target URL
|
||||
const targetURL = `https://${path}${url.search}`;
|
||||
|
||||
// Forward the request to the target URL
|
||||
const response = await fetch(targetURL, {
|
||||
method: request.method,
|
||||
headers: {
|
||||
...Object.fromEntries(request.headers),
|
||||
|
||||
// Override host header with the target host
|
||||
host: new URL(targetURL).host,
|
||||
},
|
||||
body: ['GET', 'HEAD'].includes(request.method) ? null : await request.arrayBuffer(),
|
||||
});
|
||||
|
||||
// Create response with CORS headers
|
||||
const corsHeaders = {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
|
||||
'Access-Control-Allow-Headers': '*',
|
||||
};
|
||||
|
||||
// Handle preflight requests
|
||||
// Handle CORS preflight request
|
||||
if (request.method === 'OPTIONS') {
|
||||
return new Response(null, {
|
||||
headers: corsHeaders,
|
||||
status: 204,
|
||||
status: 200,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'POST, GET, OPTIONS',
|
||||
'Access-Control-Allow-Headers': ALLOW_HEADERS.join(', '),
|
||||
'Access-Control-Expose-Headers': EXPOSE_HEADERS.join(', '),
|
||||
'Access-Control-Max-Age': '86400',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Forward the response with CORS headers
|
||||
const responseHeaders = new Headers(response.headers);
|
||||
Object.entries(corsHeaders).forEach(([key, value]) => {
|
||||
responseHeaders.set(key, value);
|
||||
});
|
||||
// Extract domain and remaining path
|
||||
const parts = path.match(/([^\/]+)\/?(.*)/);
|
||||
|
||||
if (!parts) {
|
||||
return json({ error: 'Invalid path format' }, { status: 400 });
|
||||
}
|
||||
|
||||
const domain = parts[1];
|
||||
const remainingPath = parts[2] || '';
|
||||
|
||||
// Reconstruct the target URL with query parameters
|
||||
const url = new URL(request.url);
|
||||
const targetURL = `https://${domain}/${remainingPath}${url.search}`;
|
||||
|
||||
console.log('Target URL:', targetURL);
|
||||
|
||||
// Filter and prepare headers
|
||||
const headers = new Headers();
|
||||
|
||||
// Only forward allowed headers
|
||||
for (const header of ALLOW_HEADERS) {
|
||||
if (request.headers.has(header)) {
|
||||
headers.set(header, request.headers.get(header)!);
|
||||
}
|
||||
}
|
||||
|
||||
// Set the host header
|
||||
headers.set('Host', domain);
|
||||
|
||||
// Set Git user agent if not already present
|
||||
if (!headers.has('user-agent') || !headers.get('user-agent')?.startsWith('git/')) {
|
||||
headers.set('User-Agent', 'git/@isomorphic-git/cors-proxy');
|
||||
}
|
||||
|
||||
console.log('Request headers:', Object.fromEntries(headers.entries()));
|
||||
|
||||
// Prepare fetch options
|
||||
const fetchOptions: RequestInit = {
|
||||
method: request.method,
|
||||
headers,
|
||||
redirect: 'follow',
|
||||
};
|
||||
|
||||
// Add body for non-GET/HEAD requests
|
||||
if (!['GET', 'HEAD'].includes(request.method)) {
|
||||
fetchOptions.body = request.body;
|
||||
|
||||
/*
|
||||
* Note: duplex property is removed to ensure TypeScript compatibility
|
||||
* across different environments and versions
|
||||
*/
|
||||
}
|
||||
|
||||
// Forward the request to the target URL
|
||||
const response = await fetch(targetURL, fetchOptions);
|
||||
|
||||
console.log('Response status:', response.status);
|
||||
|
||||
// Create response headers
|
||||
const responseHeaders = new Headers();
|
||||
|
||||
// Add CORS headers
|
||||
responseHeaders.set('Access-Control-Allow-Origin', '*');
|
||||
responseHeaders.set('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
|
||||
responseHeaders.set('Access-Control-Allow-Headers', ALLOW_HEADERS.join(', '));
|
||||
responseHeaders.set('Access-Control-Expose-Headers', EXPOSE_HEADERS.join(', '));
|
||||
|
||||
// Copy exposed headers from the target response
|
||||
for (const header of EXPOSE_HEADERS) {
|
||||
// Skip content-length as we'll use the original response's content-length
|
||||
if (header === 'content-length') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (response.headers.has(header)) {
|
||||
responseHeaders.set(header, response.headers.get(header)!);
|
||||
}
|
||||
}
|
||||
|
||||
// If the response was redirected, add the x-redirected-url header
|
||||
if (response.redirected) {
|
||||
responseHeaders.set('x-redirected-url', response.url);
|
||||
}
|
||||
|
||||
console.log('Response headers:', Object.fromEntries(responseHeaders.entries()));
|
||||
|
||||
// Return the response with the target's body stream piped directly
|
||||
return new Response(response.body, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Git proxy error:', error);
|
||||
return json({ error: 'Proxy error' }, { status: 500 });
|
||||
console.error('Proxy error:', error);
|
||||
return json(
|
||||
{
|
||||
error: 'Proxy error',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
url: path ? `https://${path}` : 'Invalid URL',
|
||||
},
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,18 +1,8 @@
|
||||
import type { LoaderFunctionArgs } from '@remix-run/node';
|
||||
import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';
|
||||
|
||||
export const loader = async ({ request: _request }: LoaderFunctionArgs) => {
|
||||
// Return a simple 200 OK response with some basic health information
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
status: 'healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
uptime: process.uptime(),
|
||||
}),
|
||||
{
|
||||
status: 200,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
},
|
||||
);
|
||||
return json({
|
||||
status: 'healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
};
|
||||
|
@ -1,6 +1,5 @@
|
||||
import type { ActionFunctionArgs, LoaderFunction } from '@remix-run/cloudflare';
|
||||
import { json } from '@remix-run/cloudflare';
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
// These are injected by Vite at build time
|
||||
declare const __APP_VERSION: string;
|
||||
@ -11,34 +10,24 @@ declare const __PKG_DEPENDENCIES: Record<string, string>;
|
||||
declare const __PKG_DEV_DEPENDENCIES: Record<string, string>;
|
||||
declare const __PKG_PEER_DEPENDENCIES: Record<string, string>;
|
||||
declare const __PKG_OPTIONAL_DEPENDENCIES: Record<string, string>;
|
||||
declare const __COMMIT_HASH: string;
|
||||
declare const __GIT_BRANCH: string;
|
||||
declare const __GIT_COMMIT_TIME: string;
|
||||
declare const __GIT_AUTHOR: string;
|
||||
declare const __GIT_EMAIL: string;
|
||||
declare const __GIT_REMOTE_URL: string;
|
||||
declare const __GIT_REPO_NAME: string;
|
||||
|
||||
const getGitInfo = () => {
|
||||
try {
|
||||
return {
|
||||
commitHash: execSync('git rev-parse --short HEAD').toString().trim(),
|
||||
branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
|
||||
commitTime: execSync('git log -1 --format=%cd').toString().trim(),
|
||||
author: execSync('git log -1 --format=%an').toString().trim(),
|
||||
email: execSync('git log -1 --format=%ae').toString().trim(),
|
||||
remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
|
||||
repoName: execSync('git config --get remote.origin.url')
|
||||
.toString()
|
||||
.trim()
|
||||
.replace(/^.*github.com[:/]/, '')
|
||||
.replace(/\.git$/, ''),
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Failed to get git info:', error);
|
||||
return {
|
||||
commitHash: 'unknown',
|
||||
branch: 'unknown',
|
||||
commitTime: 'unknown',
|
||||
author: 'unknown',
|
||||
email: 'unknown',
|
||||
remoteUrl: 'unknown',
|
||||
repoName: 'unknown',
|
||||
};
|
||||
}
|
||||
return {
|
||||
commitHash: __COMMIT_HASH || 'unknown',
|
||||
branch: __GIT_BRANCH || 'unknown',
|
||||
commitTime: __GIT_COMMIT_TIME || 'unknown',
|
||||
author: __GIT_AUTHOR || 'unknown',
|
||||
email: __GIT_EMAIL || 'unknown',
|
||||
remoteUrl: __GIT_REMOTE_URL || 'unknown',
|
||||
repoName: __GIT_REPO_NAME || 'unknown',
|
||||
};
|
||||
};
|
||||
|
||||
const formatDependencies = (
|
||||
@ -60,11 +49,11 @@ const getAppResponse = () => {
|
||||
version: __APP_VERSION || '0.1.0',
|
||||
description: __PKG_DESCRIPTION || 'A DIY LLM interface',
|
||||
license: __PKG_LICENSE || 'MIT',
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
environment: 'cloudflare',
|
||||
gitInfo,
|
||||
timestamp: new Date().toISOString(),
|
||||
runtimeInfo: {
|
||||
nodeVersion: process.version || 'unknown',
|
||||
nodeVersion: 'cloudflare',
|
||||
},
|
||||
dependencies: {
|
||||
production: formatDependencies(__PKG_DEPENDENCIES, 'production'),
|
||||
|
@ -1,138 +1,48 @@
|
||||
import type { LoaderFunction } from '@remix-run/cloudflare';
|
||||
import { json } from '@remix-run/cloudflare';
|
||||
import { execSync } from 'child_process';
|
||||
import { json, type LoaderFunction } from '@remix-run/cloudflare';
|
||||
|
||||
interface GitHubRepoInfo {
|
||||
name: string;
|
||||
full_name: string;
|
||||
default_branch: string;
|
||||
stargazers_count: number;
|
||||
forks_count: number;
|
||||
open_issues_count: number;
|
||||
parent?: {
|
||||
full_name: string;
|
||||
default_branch: string;
|
||||
stargazers_count: number;
|
||||
forks_count: number;
|
||||
interface GitInfo {
|
||||
local: {
|
||||
commitHash: string;
|
||||
branch: string;
|
||||
commitTime: string;
|
||||
author: string;
|
||||
email: string;
|
||||
remoteUrl: string;
|
||||
repoName: string;
|
||||
};
|
||||
github?: {
|
||||
currentRepo?: {
|
||||
fullName: string;
|
||||
defaultBranch: string;
|
||||
stars: number;
|
||||
forks: number;
|
||||
openIssues?: number;
|
||||
};
|
||||
};
|
||||
isForked?: boolean;
|
||||
}
|
||||
|
||||
const getLocalGitInfo = () => {
|
||||
try {
|
||||
return {
|
||||
commitHash: execSync('git rev-parse HEAD').toString().trim(),
|
||||
branch: execSync('git rev-parse --abbrev-ref HEAD').toString().trim(),
|
||||
commitTime: execSync('git log -1 --format=%cd').toString().trim(),
|
||||
author: execSync('git log -1 --format=%an').toString().trim(),
|
||||
email: execSync('git log -1 --format=%ae').toString().trim(),
|
||||
remoteUrl: execSync('git config --get remote.origin.url').toString().trim(),
|
||||
repoName: execSync('git config --get remote.origin.url')
|
||||
.toString()
|
||||
.trim()
|
||||
.replace(/^.*github.com[:/]/, '')
|
||||
.replace(/\.git$/, ''),
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Failed to get local git info:', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
// These values will be replaced at build time
|
||||
declare const __COMMIT_HASH: string;
|
||||
declare const __GIT_BRANCH: string;
|
||||
declare const __GIT_COMMIT_TIME: string;
|
||||
declare const __GIT_AUTHOR: string;
|
||||
declare const __GIT_EMAIL: string;
|
||||
declare const __GIT_REMOTE_URL: string;
|
||||
declare const __GIT_REPO_NAME: string;
|
||||
|
||||
const getGitHubInfo = async (repoFullName: string) => {
|
||||
try {
|
||||
// Add GitHub token if available
|
||||
const headers: Record<string, string> = {
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
};
|
||||
|
||||
const githubToken = process.env.GITHUB_TOKEN;
|
||||
|
||||
if (githubToken) {
|
||||
headers.Authorization = `token ${githubToken}`;
|
||||
}
|
||||
|
||||
console.log('Fetching GitHub info for:', repoFullName); // Debug log
|
||||
|
||||
const response = await fetch(`https://api.github.com/repos/${repoFullName}`, {
|
||||
headers,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
console.error('GitHub API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
repoFullName,
|
||||
});
|
||||
|
||||
// If we get a 404, try the main repo as fallback
|
||||
if (response.status === 404 && repoFullName !== 'stackblitz-labs/bolt.diy') {
|
||||
return getGitHubInfo('stackblitz-labs/bolt.diy');
|
||||
}
|
||||
|
||||
throw new Error(`GitHub API error: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log('GitHub API response:', data); // Debug log
|
||||
|
||||
return data as GitHubRepoInfo;
|
||||
} catch (error) {
|
||||
console.error('Failed to get GitHub info:', error);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
export const loader: LoaderFunction = async ({ request: _request }) => {
|
||||
const localInfo = getLocalGitInfo();
|
||||
console.log('Local git info:', localInfo); // Debug log
|
||||
|
||||
// If we have local info, try to get GitHub info for both our fork and upstream
|
||||
let githubInfo = null;
|
||||
|
||||
if (localInfo?.repoName) {
|
||||
githubInfo = await getGitHubInfo(localInfo.repoName);
|
||||
}
|
||||
|
||||
// If no local info or GitHub info, try the main repo
|
||||
if (!githubInfo) {
|
||||
githubInfo = await getGitHubInfo('stackblitz-labs/bolt.diy');
|
||||
}
|
||||
|
||||
const response = {
|
||||
local: localInfo || {
|
||||
commitHash: 'unknown',
|
||||
branch: 'unknown',
|
||||
commitTime: 'unknown',
|
||||
author: 'unknown',
|
||||
email: 'unknown',
|
||||
remoteUrl: 'unknown',
|
||||
repoName: 'unknown',
|
||||
export const loader: LoaderFunction = async () => {
|
||||
const gitInfo: GitInfo = {
|
||||
local: {
|
||||
commitHash: typeof __COMMIT_HASH !== 'undefined' ? __COMMIT_HASH : 'development',
|
||||
branch: typeof __GIT_BRANCH !== 'undefined' ? __GIT_BRANCH : 'main',
|
||||
commitTime: typeof __GIT_COMMIT_TIME !== 'undefined' ? __GIT_COMMIT_TIME : new Date().toISOString(),
|
||||
author: typeof __GIT_AUTHOR !== 'undefined' ? __GIT_AUTHOR : 'development',
|
||||
email: typeof __GIT_EMAIL !== 'undefined' ? __GIT_EMAIL : 'development@local',
|
||||
remoteUrl: typeof __GIT_REMOTE_URL !== 'undefined' ? __GIT_REMOTE_URL : 'local',
|
||||
repoName: typeof __GIT_REPO_NAME !== 'undefined' ? __GIT_REPO_NAME : 'bolt.diy',
|
||||
},
|
||||
github: githubInfo
|
||||
? {
|
||||
currentRepo: {
|
||||
fullName: githubInfo.full_name,
|
||||
defaultBranch: githubInfo.default_branch,
|
||||
stars: githubInfo.stargazers_count,
|
||||
forks: githubInfo.forks_count,
|
||||
openIssues: githubInfo.open_issues_count,
|
||||
},
|
||||
upstream: githubInfo.parent
|
||||
? {
|
||||
fullName: githubInfo.parent.full_name,
|
||||
defaultBranch: githubInfo.parent.default_branch,
|
||||
stars: githubInfo.parent.stargazers_count,
|
||||
forks: githubInfo.parent.forks_count,
|
||||
}
|
||||
: null,
|
||||
}
|
||||
: null,
|
||||
isForked: Boolean(githubInfo?.parent),
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
console.log('Final response:', response);
|
||||
|
||||
// Debug log
|
||||
return json(response);
|
||||
return json(gitInfo);
|
||||
};
|
||||
|
@ -1,573 +1,21 @@
|
||||
import { json } from '@remix-run/node';
|
||||
import type { ActionFunction } from '@remix-run/node';
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
interface UpdateRequestBody {
|
||||
branch: string;
|
||||
autoUpdate?: boolean;
|
||||
}
|
||||
|
||||
interface UpdateProgress {
|
||||
stage: 'fetch' | 'pull' | 'install' | 'build' | 'complete';
|
||||
message: string;
|
||||
progress?: number;
|
||||
error?: string;
|
||||
details?: {
|
||||
changedFiles?: string[];
|
||||
additions?: number;
|
||||
deletions?: number;
|
||||
commitMessages?: string[];
|
||||
totalSize?: string;
|
||||
currentCommit?: string;
|
||||
remoteCommit?: string;
|
||||
updateReady?: boolean;
|
||||
changelog?: string;
|
||||
compareUrl?: string;
|
||||
};
|
||||
}
|
||||
import { json, type ActionFunction } from '@remix-run/cloudflare';
|
||||
|
||||
export const action: ActionFunction = async ({ request }) => {
|
||||
if (request.method !== 'POST') {
|
||||
return json({ error: 'Method not allowed' }, { status: 405 });
|
||||
}
|
||||
|
||||
try {
|
||||
const body = await request.json();
|
||||
|
||||
if (!body || typeof body !== 'object' || !('branch' in body) || typeof body.branch !== 'string') {
|
||||
return json({ error: 'Invalid request body: branch is required and must be a string' }, { status: 400 });
|
||||
}
|
||||
|
||||
const { branch, autoUpdate = false } = body as UpdateRequestBody;
|
||||
|
||||
// Create a ReadableStream to send progress updates
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
const encoder = new TextEncoder();
|
||||
const sendProgress = (update: UpdateProgress) => {
|
||||
controller.enqueue(encoder.encode(JSON.stringify(update) + '\n'));
|
||||
};
|
||||
|
||||
try {
|
||||
// Initial check stage
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Checking repository status...',
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
// Check if remote exists
|
||||
let defaultBranch = branch || 'main'; // Make branch mutable
|
||||
|
||||
try {
|
||||
await execAsync('git remote get-url upstream');
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Repository remote verified',
|
||||
progress: 10,
|
||||
});
|
||||
} catch {
|
||||
throw new Error(
|
||||
'No upstream repository found. Please set up the upstream repository first by running:\ngit remote add upstream https://github.com/stackblitz-labs/bolt.diy.git',
|
||||
);
|
||||
}
|
||||
|
||||
// Get default branch if not specified
|
||||
if (!branch) {
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Detecting default branch...',
|
||||
progress: 20,
|
||||
});
|
||||
|
||||
try {
|
||||
const { stdout } = await execAsync('git remote show upstream | grep "HEAD branch" | cut -d" " -f5');
|
||||
defaultBranch = stdout.trim() || 'main';
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: `Using branch: ${defaultBranch}`,
|
||||
progress: 30,
|
||||
});
|
||||
} catch {
|
||||
defaultBranch = 'main'; // Fallback to main if we can't detect
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Using default branch: main',
|
||||
progress: 30,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch stage
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Fetching latest changes...',
|
||||
progress: 40,
|
||||
});
|
||||
|
||||
// Fetch all remotes
|
||||
await execAsync('git fetch --all');
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Remote changes fetched',
|
||||
progress: 50,
|
||||
});
|
||||
|
||||
// Check if remote branch exists
|
||||
try {
|
||||
await execAsync(`git rev-parse --verify upstream/${defaultBranch}`);
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Remote branch verified',
|
||||
progress: 60,
|
||||
});
|
||||
} catch {
|
||||
throw new Error(
|
||||
`Remote branch 'upstream/${defaultBranch}' not found. Please ensure the upstream repository is properly configured.`,
|
||||
);
|
||||
}
|
||||
|
||||
// Get current commit hash and remote commit hash
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Comparing versions...',
|
||||
progress: 70,
|
||||
});
|
||||
|
||||
const { stdout: currentCommit } = await execAsync('git rev-parse HEAD');
|
||||
const { stdout: remoteCommit } = await execAsync(`git rev-parse upstream/${defaultBranch}`);
|
||||
|
||||
// If we're on the same commit, no update is available
|
||||
if (currentCommit.trim() === remoteCommit.trim()) {
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: 'No updates available. You are on the latest version.',
|
||||
progress: 100,
|
||||
details: {
|
||||
currentCommit: currentCommit.trim().substring(0, 7),
|
||||
remoteCommit: remoteCommit.trim().substring(0, 7),
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Analyzing changes...',
|
||||
progress: 80,
|
||||
});
|
||||
|
||||
// Initialize variables
|
||||
let changedFiles: string[] = [];
|
||||
let commitMessages: string[] = [];
|
||||
let stats: RegExpMatchArray | null = null;
|
||||
let totalSizeInBytes = 0;
|
||||
|
||||
// Format size for display
|
||||
const formatSize = (bytes: number) => {
|
||||
if (bytes === 0) {
|
||||
return '0 B';
|
||||
}
|
||||
|
||||
const k = 1024;
|
||||
const sizes = ['B', 'KB', 'MB', 'GB'];
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
|
||||
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
|
||||
};
|
||||
|
||||
// Get list of changed files and their sizes
|
||||
try {
|
||||
const { stdout: diffOutput } = await execAsync(
|
||||
`git diff --name-status ${currentCommit.trim()}..${remoteCommit.trim()}`,
|
||||
);
|
||||
const files = diffOutput.split('\n').filter(Boolean);
|
||||
|
||||
if (files.length === 0) {
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: `No file changes detected between your version and upstream/${defaultBranch}. You might be on a different branch.`,
|
||||
progress: 100,
|
||||
details: {
|
||||
currentCommit: currentCommit.trim().substring(0, 7),
|
||||
remoteCommit: remoteCommit.trim().substring(0, 7),
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: `Found ${files.length} changed files, calculating sizes...`,
|
||||
progress: 90,
|
||||
});
|
||||
|
||||
// Get size information for each changed file
|
||||
for (const line of files) {
|
||||
const [status, file] = line.split('\t');
|
||||
|
||||
if (status !== 'D') {
|
||||
// Skip deleted files
|
||||
try {
|
||||
const { stdout: sizeOutput } = await execAsync(`git cat-file -s ${remoteCommit.trim()}:${file}`);
|
||||
const size = parseInt(sizeOutput) || 0;
|
||||
totalSizeInBytes += size;
|
||||
} catch {
|
||||
console.debug(`Could not get size for file: ${file}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
changedFiles = files.map((line) => {
|
||||
const [status, file] = line.split('\t');
|
||||
return `${status === 'M' ? 'Modified' : status === 'A' ? 'Added' : 'Deleted'}: ${file}`;
|
||||
});
|
||||
} catch (err) {
|
||||
console.debug('Failed to get changed files:', err);
|
||||
throw new Error(`Failed to compare changes with upstream/${defaultBranch}. Are you on the correct branch?`);
|
||||
}
|
||||
|
||||
// Get commit messages between current and remote
|
||||
try {
|
||||
const { stdout: logOutput } = await execAsync(
|
||||
`git log --pretty=format:"%h|%s|%aI" ${currentCommit.trim()}..${remoteCommit.trim()}`,
|
||||
);
|
||||
|
||||
// Parse and group commits by type
|
||||
const commits = logOutput
|
||||
.split('\n')
|
||||
.filter(Boolean)
|
||||
.map((line) => {
|
||||
const [hash, subject, timestamp] = line.split('|');
|
||||
let type = 'other';
|
||||
let message = subject;
|
||||
|
||||
if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
|
||||
type = 'feature';
|
||||
message = subject.replace(/^feat(?:ure)?:/, '').trim();
|
||||
} else if (subject.startsWith('fix:')) {
|
||||
type = 'fix';
|
||||
message = subject.replace(/^fix:/, '').trim();
|
||||
} else if (subject.startsWith('docs:')) {
|
||||
type = 'docs';
|
||||
message = subject.replace(/^docs:/, '').trim();
|
||||
} else if (subject.startsWith('style:')) {
|
||||
type = 'style';
|
||||
message = subject.replace(/^style:/, '').trim();
|
||||
} else if (subject.startsWith('refactor:')) {
|
||||
type = 'refactor';
|
||||
message = subject.replace(/^refactor:/, '').trim();
|
||||
} else if (subject.startsWith('perf:')) {
|
||||
type = 'perf';
|
||||
message = subject.replace(/^perf:/, '').trim();
|
||||
} else if (subject.startsWith('test:')) {
|
||||
type = 'test';
|
||||
message = subject.replace(/^test:/, '').trim();
|
||||
} else if (subject.startsWith('build:')) {
|
||||
type = 'build';
|
||||
message = subject.replace(/^build:/, '').trim();
|
||||
} else if (subject.startsWith('ci:')) {
|
||||
type = 'ci';
|
||||
message = subject.replace(/^ci:/, '').trim();
|
||||
}
|
||||
|
||||
return {
|
||||
hash,
|
||||
type,
|
||||
message,
|
||||
timestamp: new Date(timestamp),
|
||||
};
|
||||
});
|
||||
|
||||
// Group commits by type
|
||||
const groupedCommits = commits.reduce(
|
||||
(acc, commit) => {
|
||||
if (!acc[commit.type]) {
|
||||
acc[commit.type] = [];
|
||||
}
|
||||
|
||||
acc[commit.type].push(commit);
|
||||
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, typeof commits>,
|
||||
);
|
||||
|
||||
// Format commit messages with emojis and timestamps
|
||||
const formattedMessages = Object.entries(groupedCommits).map(([type, commits]) => {
|
||||
const emoji = {
|
||||
feature: '✨',
|
||||
fix: '🐛',
|
||||
docs: '📚',
|
||||
style: '💎',
|
||||
refactor: '♻️',
|
||||
perf: '⚡',
|
||||
test: '🧪',
|
||||
build: '🛠️',
|
||||
ci: '⚙️',
|
||||
other: '🔍',
|
||||
}[type];
|
||||
|
||||
const title = {
|
||||
feature: 'Features',
|
||||
fix: 'Bug Fixes',
|
||||
docs: 'Documentation',
|
||||
style: 'Styles',
|
||||
refactor: 'Code Refactoring',
|
||||
perf: 'Performance',
|
||||
test: 'Tests',
|
||||
build: 'Build',
|
||||
ci: 'CI',
|
||||
other: 'Other Changes',
|
||||
}[type];
|
||||
|
||||
return `### ${emoji} ${title}\n\n${commits
|
||||
.map((c) => `* ${c.message} (${c.hash.substring(0, 7)}) - ${c.timestamp.toLocaleString()}`)
|
||||
.join('\n')}`;
|
||||
});
|
||||
|
||||
commitMessages = formattedMessages;
|
||||
} catch {
|
||||
// Handle silently - empty commitMessages array will be used
|
||||
}
|
||||
|
||||
// Get diff stats using the specific commits
|
||||
try {
|
||||
const { stdout: diffStats } = await execAsync(
|
||||
`git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`,
|
||||
);
|
||||
stats = diffStats.match(
|
||||
/(\d+) files? changed(?:, (\d+) insertions?\(\+\))?(?:, (\d+) deletions?\(-\))?/,
|
||||
);
|
||||
} catch {
|
||||
// Handle silently - null stats will be used
|
||||
}
|
||||
|
||||
// If we somehow still have no changes detected
|
||||
if (!stats && changedFiles.length === 0) {
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: `No changes detected between your version and upstream/${defaultBranch}. This might be unexpected - please check your git status.`,
|
||||
progress: 100,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Fetch changelog
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: 'Fetching changelog...',
|
||||
progress: 95,
|
||||
});
|
||||
|
||||
const changelog = await fetchChangelog(currentCommit.trim(), remoteCommit.trim());
|
||||
|
||||
// We have changes, send the details
|
||||
sendProgress({
|
||||
stage: 'fetch',
|
||||
message: `Changes detected on upstream/${defaultBranch}`,
|
||||
progress: 100,
|
||||
details: {
|
||||
changedFiles,
|
||||
additions: stats?.[2] ? parseInt(stats[2]) : 0,
|
||||
deletions: stats?.[3] ? parseInt(stats[3]) : 0,
|
||||
commitMessages,
|
||||
totalSize: formatSize(totalSizeInBytes),
|
||||
currentCommit: currentCommit.trim().substring(0, 7),
|
||||
remoteCommit: remoteCommit.trim().substring(0, 7),
|
||||
updateReady: true,
|
||||
changelog,
|
||||
compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
|
||||
},
|
||||
});
|
||||
|
||||
// Only proceed with update if autoUpdate is true
|
||||
if (!autoUpdate) {
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: 'Update is ready to be applied. Click "Update Now" to proceed.',
|
||||
progress: 100,
|
||||
details: {
|
||||
changedFiles,
|
||||
additions: stats?.[2] ? parseInt(stats[2]) : 0,
|
||||
deletions: stats?.[3] ? parseInt(stats[3]) : 0,
|
||||
commitMessages,
|
||||
totalSize: formatSize(totalSizeInBytes),
|
||||
currentCommit: currentCommit.trim().substring(0, 7),
|
||||
remoteCommit: remoteCommit.trim().substring(0, 7),
|
||||
updateReady: true,
|
||||
changelog,
|
||||
compareUrl: `https://github.com/stackblitz-labs/bolt.diy/compare/${currentCommit.trim().substring(0, 7)}...${remoteCommit.trim().substring(0, 7)}`,
|
||||
},
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Pull stage
|
||||
sendProgress({
|
||||
stage: 'pull',
|
||||
message: `Pulling changes from upstream/${defaultBranch}...`,
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
await execAsync(`git pull upstream ${defaultBranch}`);
|
||||
|
||||
sendProgress({
|
||||
stage: 'pull',
|
||||
message: 'Changes pulled successfully',
|
||||
progress: 100,
|
||||
});
|
||||
|
||||
// Install stage
|
||||
sendProgress({
|
||||
stage: 'install',
|
||||
message: 'Installing dependencies...',
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
await execAsync('pnpm install');
|
||||
|
||||
sendProgress({
|
||||
stage: 'install',
|
||||
message: 'Dependencies installed successfully',
|
||||
progress: 100,
|
||||
});
|
||||
|
||||
// Build stage
|
||||
sendProgress({
|
||||
stage: 'build',
|
||||
message: 'Building application...',
|
||||
progress: 0,
|
||||
});
|
||||
|
||||
await execAsync('pnpm build');
|
||||
|
||||
sendProgress({
|
||||
stage: 'build',
|
||||
message: 'Build completed successfully',
|
||||
progress: 100,
|
||||
});
|
||||
|
||||
// Complete
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: 'Update completed successfully! Click Restart to apply changes.',
|
||||
progress: 100,
|
||||
});
|
||||
} catch (err) {
|
||||
sendProgress({
|
||||
stage: 'complete',
|
||||
message: 'Update failed',
|
||||
error: err instanceof Error ? err.message : 'Unknown error occurred',
|
||||
});
|
||||
} finally {
|
||||
controller.close();
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
console.error('Update preparation failed:', err);
|
||||
return json(
|
||||
{
|
||||
success: false,
|
||||
error: err instanceof Error ? err.message : 'Unknown error occurred while preparing update',
|
||||
},
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
return json(
|
||||
{
|
||||
error: 'Updates must be performed manually in a server environment',
|
||||
instructions: [
|
||||
'1. Navigate to the project directory',
|
||||
'2. Run: git fetch upstream',
|
||||
'3. Run: git pull upstream main',
|
||||
'4. Run: pnpm install',
|
||||
'5. Run: pnpm run build',
|
||||
],
|
||||
},
|
||||
{ status: 400 },
|
||||
);
|
||||
};
|
||||
|
||||
// Add this function to fetch the changelog
|
||||
async function fetchChangelog(currentCommit: string, remoteCommit: string): Promise<string> {
|
||||
try {
|
||||
// First try to get the changelog.md content
|
||||
const { stdout: changelogContent } = await execAsync('git show upstream/main:changelog.md');
|
||||
|
||||
// If we have a changelog, return it
|
||||
if (changelogContent) {
|
||||
return changelogContent;
|
||||
}
|
||||
|
||||
// If no changelog.md, generate one in a similar format
|
||||
let changelog = '# Changes in this Update\n\n';
|
||||
|
||||
// Get commit messages grouped by type
|
||||
const { stdout: commitLog } = await execAsync(
|
||||
`git log --pretty=format:"%h|%s|%b" ${currentCommit.trim()}..${remoteCommit.trim()}`,
|
||||
);
|
||||
|
||||
const commits = commitLog.split('\n').filter(Boolean);
|
||||
const categorizedCommits: Record<string, string[]> = {
|
||||
'✨ Features': [],
|
||||
'🐛 Bug Fixes': [],
|
||||
'📚 Documentation': [],
|
||||
'💎 Styles': [],
|
||||
'♻️ Code Refactoring': [],
|
||||
'⚡ Performance': [],
|
||||
'🧪 Tests': [],
|
||||
'🛠️ Build': [],
|
||||
'⚙️ CI': [],
|
||||
'🔍 Other Changes': [],
|
||||
};
|
||||
|
||||
// Categorize commits
|
||||
for (const commit of commits) {
|
||||
const [hash, subject] = commit.split('|');
|
||||
let category = '🔍 Other Changes';
|
||||
|
||||
if (subject.startsWith('feat:') || subject.startsWith('feature:')) {
|
||||
category = '✨ Features';
|
||||
} else if (subject.startsWith('fix:')) {
|
||||
category = '🐛 Bug Fixes';
|
||||
} else if (subject.startsWith('docs:')) {
|
||||
category = '📚 Documentation';
|
||||
} else if (subject.startsWith('style:')) {
|
||||
category = '💎 Styles';
|
||||
} else if (subject.startsWith('refactor:')) {
|
||||
category = '♻️ Code Refactoring';
|
||||
} else if (subject.startsWith('perf:')) {
|
||||
category = '⚡ Performance';
|
||||
} else if (subject.startsWith('test:')) {
|
||||
category = '🧪 Tests';
|
||||
} else if (subject.startsWith('build:')) {
|
||||
category = '🛠️ Build';
|
||||
} else if (subject.startsWith('ci:')) {
|
||||
category = '⚙️ CI';
|
||||
}
|
||||
|
||||
const message = subject.includes(':') ? subject.split(':')[1].trim() : subject.trim();
|
||||
categorizedCommits[category].push(`* ${message} (${hash.substring(0, 7)})`);
|
||||
}
|
||||
|
||||
// Build changelog content
|
||||
for (const [category, commits] of Object.entries(categorizedCommits)) {
|
||||
if (commits.length > 0) {
|
||||
changelog += `\n## ${category}\n\n${commits.join('\n')}\n`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add stats
|
||||
const { stdout: stats } = await execAsync(`git diff --shortstat ${currentCommit.trim()}..${remoteCommit.trim()}`);
|
||||
|
||||
if (stats) {
|
||||
changelog += '\n## 📊 Stats\n\n';
|
||||
changelog += `${stats.trim()}\n`;
|
||||
}
|
||||
|
||||
return changelog;
|
||||
} catch (error) {
|
||||
console.error('Error fetching changelog:', error);
|
||||
return 'Unable to fetch changelog';
|
||||
}
|
||||
}
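The action above streams progress as newline-delimited JSON: each call to sendProgress writes one UpdateProgress object followed by '\n', even though the response is labelled text/event-stream. A minimal client-side sketch for consuming that stream follows; the '/api/update' path, the shared UpdateProgress type and the onProgress callback are assumptions for illustration, not code from this commit.

// Illustrative sketch: read the NDJSON progress stream produced by the action above.
async function streamUpdateProgress(onProgress: (update: UpdateProgress) => void) {
  const res = await fetch('/api/update', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ branch: 'main', autoUpdate: false }),
  });

  if (!res.ok || !res.body) {
    throw new Error(`Update request failed: ${res.status}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffered = '';

  for (;;) {
    const { done, value } = await reader.read();

    if (done) {
      break;
    }

    buffered += decoder.decode(value, { stream: true });

    // One JSON object per line, mirroring JSON.stringify(update) + '\n' in sendProgress.
    const lines = buffered.split('\n');
    buffered = lines.pop() ?? '';

    for (const line of lines.filter(Boolean)) {
      onProgress(JSON.parse(line) as UpdateProgress);
    }
  }
}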
|
||||
|
25
assets/entitlements.mac.plist
Normal file
@ -0,0 +1,25 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Allows Just-In-Time compilation required by V8 JavaScript engine in Electron -->
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
|
||||
<!-- This is needed for the V8 JavaScript engine to function properly -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allows network connections -->
|
||||
<key>com.apple.security.network.client</key>
|
||||
<true/>
|
||||
|
||||
<!-- Uncomment to allow read and write access to files explicitly selected by the user through system dialogs -->
|
||||
<!-- <key>com.apple.security.files.user-selected.read-write</key>
|
||||
<true/> -->
|
||||
|
||||
<!-- Uncomment to allow read and write access to the user's Downloads directory -->
|
||||
<!-- <key>com.apple.security.files.downloads.read-write</key>
|
||||
<true/> -->
|
||||
</dict>
|
||||
</plist>
|
BIN
assets/icons/icon.icns
Normal file
Binary file not shown.
BIN
assets/icons/icon.ico
Normal file
Binary file not shown.
BIN
assets/icons/icon.png
Normal file
Binary file not shown.
65
electron-builder.yml
Normal file
@ -0,0 +1,65 @@
|
||||
appId: com.stackblitz.bolt.diy
|
||||
productName: Bolt Local
|
||||
directories:
|
||||
buildResources: build
|
||||
output: dist
|
||||
files:
|
||||
- build/**/*
|
||||
- package.json
|
||||
- node_modules/**/*
|
||||
- icons/**
|
||||
- electron-update.yml
|
||||
extraMetadata:
|
||||
main: build/electron/main/index.mjs
|
||||
asarUnpack:
|
||||
- resources/**
|
||||
- build/client/**/*
|
||||
- build/server/**/*
|
||||
- electron-update.yml
|
||||
|
||||
mac:
|
||||
icon: assets/icons/icon.icns
|
||||
target:
|
||||
- dmg
|
||||
identity: "Xinzhe Wang (RDQSC33B2X)"
|
||||
category: "public.app-category.developer-tools"
|
||||
type: "distribution"
|
||||
hardenedRuntime: true
|
||||
entitlements: "assets/entitlements.mac.plist"
|
||||
entitlementsInherit: "assets/entitlements.mac.plist"
|
||||
gatekeeperAssess: false
|
||||
|
||||
win:
|
||||
icon: assets/icons/icon.ico
|
||||
target:
|
||||
- nsis
|
||||
signDlls: false
|
||||
artifactName: ${name}-${version}-${os}-${arch}.${ext}
|
||||
|
||||
linux:
|
||||
icon: assets/icons/icon.png
|
||||
target:
|
||||
- AppImage
|
||||
- deb
|
||||
artifactName: ${name}-${version}-${os}-${arch}.${ext}
|
||||
category: Development
|
||||
|
||||
nsis:
|
||||
oneClick: false
|
||||
allowToChangeInstallationDirectory: true
|
||||
createDesktopShortcut: true
|
||||
createStartMenuShortcut: true
|
||||
shortcutName: ${productName}
|
||||
artifactName: ${name}-${version}-${os}-${arch}-setup.${ext}
|
||||
|
||||
npmRebuild: false
|
||||
|
||||
publish:
|
||||
provider: github
|
||||
owner: Derek-X-Wang
|
||||
repo: bolt.local
|
||||
private: true
|
||||
releaseType: release
|
||||
|
||||
electronDownload:
|
||||
mirror: https://npmmirror.com/mirrors/electron/
|
4
electron-update.yml
Normal file
@ -0,0 +1,4 @@
owner: stackblitz-labs
repo: bolt.diy
provider: github
private: false
201
electron/main/index.ts
Normal file
@ -0,0 +1,201 @@
|
||||
/// <reference types="vite/client" />
|
||||
import { createRequestHandler } from '@remix-run/node';
|
||||
import electron, { app, BrowserWindow, ipcMain, protocol, session } from 'electron';
|
||||
import log from 'electron-log';
|
||||
import path from 'node:path';
|
||||
import * as pkg from '../../package.json';
|
||||
import { setupAutoUpdater } from './utils/auto-update';
|
||||
import { isDev, DEFAULT_PORT } from './utils/constants';
|
||||
import { initViteServer, viteServer } from './utils/vite-server';
|
||||
import { setupMenu } from './ui/menu';
|
||||
import { createWindow } from './ui/window';
|
||||
import { initCookies, storeCookies } from './utils/cookie';
|
||||
import { loadServerBuild, serveAsset } from './utils/serve';
|
||||
import { reloadOnChange } from './utils/reload';
|
||||
|
||||
Object.assign(console, log.functions);
|
||||
|
||||
console.debug('main: import.meta.env:', import.meta.env);
|
||||
console.log('main: isDev:', isDev);
|
||||
console.log('NODE_ENV:', global.process.env.NODE_ENV);
|
||||
console.log('isPackaged:', app.isPackaged);
|
||||
|
||||
// Log unhandled errors
|
||||
process.on('uncaughtException', async (error) => {
|
||||
console.log('Uncaught Exception:', error);
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', async (error) => {
|
||||
console.log('Unhandled Rejection:', error);
|
||||
});
|
||||
|
||||
(() => {
|
||||
const root = global.process.env.APP_PATH_ROOT ?? import.meta.env.VITE_APP_PATH_ROOT;
|
||||
|
||||
if (root === undefined) {
|
||||
console.log('no given APP_PATH_ROOT or VITE_APP_PATH_ROOT. default path is used.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!path.isAbsolute(root)) {
|
||||
console.log('APP_PATH_ROOT must be absolute path.');
|
||||
global.process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`APP_PATH_ROOT: ${root}`);
|
||||
|
||||
const subdirName = pkg.name;
|
||||
|
||||
for (const [key, val] of [
|
||||
['appData', ''],
|
||||
['userData', subdirName],
|
||||
['sessionData', subdirName],
|
||||
] as const) {
|
||||
app.setPath(key, path.join(root, val));
|
||||
}
|
||||
|
||||
app.setAppLogsPath(path.join(root, subdirName, 'Logs'));
|
||||
})();
|
||||
|
||||
console.log('appPath:', app.getAppPath());
|
||||
|
||||
const keys: Parameters<typeof app.getPath>[number][] = ['home', 'appData', 'userData', 'sessionData', 'logs', 'temp'];
|
||||
keys.forEach((key) => console.log(`${key}:`, app.getPath(key)));
|
||||
console.log('start whenReady');
|
||||
|
||||
declare global {
|
||||
// eslint-disable-next-line no-var, @typescript-eslint/naming-convention
|
||||
var __electron__: typeof electron;
|
||||
}
|
||||
|
||||
(async () => {
|
||||
await app.whenReady();
|
||||
console.log('App is ready');
|
||||
|
||||
// Load any existing cookies from ElectronStore and set them on the default session
|
||||
await initCookies();
|
||||
|
||||
const serverBuild = await loadServerBuild();
|
||||
|
||||
protocol.handle('http', async (req) => {
|
||||
console.log('Handling request for:', req.url);
|
||||
|
||||
if (isDev) {
|
||||
console.log('Dev mode: forwarding to vite server');
|
||||
return await fetch(req);
|
||||
}
|
||||
|
||||
req.headers.append('Referer', req.referrer);
|
||||
|
||||
try {
|
||||
const url = new URL(req.url);
|
||||
|
||||
// Forward requests to specific local server ports
|
||||
if (url.port !== `${DEFAULT_PORT}`) {
|
||||
console.log('Forwarding request to local server:', req.url);
|
||||
return await fetch(req);
|
||||
}
|
||||
|
||||
// Always try to serve asset first
|
||||
const assetPath = path.join(app.getAppPath(), 'build', 'client');
|
||||
const res = await serveAsset(req, assetPath);
|
||||
|
||||
if (res) {
|
||||
console.log('Served asset:', req.url);
|
||||
return res;
|
||||
}
|
||||
|
||||
// Forward all cookies to remix server
|
||||
const cookies = await session.defaultSession.cookies.get({});
|
||||
|
||||
if (cookies.length > 0) {
|
||||
req.headers.set('Cookie', cookies.map((c) => `${c.name}=${c.value}`).join('; '));
|
||||
|
||||
// Store all cookies
|
||||
await storeCookies(cookies);
|
||||
}
|
||||
|
||||
// Create request handler with the server build
|
||||
const handler = createRequestHandler(serverBuild, 'production');
|
||||
console.log('Handling request with server build:', req.url);
|
||||
|
||||
const result = await handler(req, {
|
||||
/*
|
||||
* Remix app access cloudflare.env
|
||||
* Need to pass an empty object to prevent undefined
|
||||
*/
|
||||
// @ts-ignore:next-line
|
||||
cloudflare: {},
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (err) {
|
||||
console.log('Error handling request:', {
|
||||
url: req.url,
|
||||
error:
|
||||
err instanceof Error
|
||||
? {
|
||||
message: err.message,
|
||||
stack: err.stack,
|
||||
cause: err.cause,
|
||||
}
|
||||
: err,
|
||||
});
|
||||
|
||||
const error = err instanceof Error ? err : new Error(String(err));
|
||||
|
||||
return new Response(`Error handling request to ${req.url}: ${error.stack ?? error.message}`, {
|
||||
status: 500,
|
||||
headers: { 'content-type': 'text/plain' },
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const rendererURL = await (isDev
|
||||
? (async () => {
|
||||
await initViteServer();
|
||||
|
||||
if (!viteServer) {
|
||||
throw new Error('Vite server is not initialized');
|
||||
}
|
||||
|
||||
const listen = await viteServer.listen();
|
||||
global.__electron__ = electron;
|
||||
viteServer.printUrls();
|
||||
|
||||
return `http://localhost:${listen.config.server.port}`;
|
||||
})()
|
||||
: `http://localhost:${DEFAULT_PORT}`);
|
||||
|
||||
console.log('Using renderer URL:', rendererURL);
|
||||
|
||||
const win = await createWindow(rendererURL);
|
||||
|
||||
app.on('activate', async () => {
|
||||
if (BrowserWindow.getAllWindows().length === 0) {
|
||||
await createWindow(rendererURL);
|
||||
}
|
||||
});
|
||||
|
||||
console.log('end whenReady');
|
||||
|
||||
return win;
|
||||
})()
|
||||
.then((win) => {
|
||||
// IPC samples: send and receive.
|
||||
let count = 0;
|
||||
setInterval(() => win.webContents.send('ping', `hello from main! ${count++}`), 60 * 1000);
|
||||
ipcMain.handle('ipcTest', (event, ...args) => console.log('ipc: renderer -> main', { event, ...args }));
|
||||
|
||||
return win;
|
||||
})
|
||||
.then((win) => setupMenu(win));
|
||||
|
||||
app.on('window-all-closed', () => {
|
||||
if (process.platform !== 'darwin') {
|
||||
app.quit();
|
||||
}
|
||||
});
|
||||
|
||||
reloadOnChange();
|
||||
setupAutoUpdater();
|
30
electron/main/tsconfig.json
Normal file
@ -0,0 +1,30 @@
|
||||
{
|
||||
"include": ["."],
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"jsx": "preserve",
|
||||
"target": "ESNext",
|
||||
"noEmit": true,
|
||||
"skipLibCheck": true,
|
||||
"useDefineForClassFields": true,
|
||||
|
||||
/* modules */
|
||||
"moduleResolution": "Bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"resolveJsonModule": true,
|
||||
"module": "ESNext",
|
||||
"isolatedModules": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"declaration": true,
|
||||
"declarationDir": "./dist",
|
||||
|
||||
/* type checking */
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"forceConsistentCasingInFileNames": true
|
||||
}
|
||||
}
|
29
electron/main/ui/menu.ts
Normal file
@ -0,0 +1,29 @@
|
||||
import { BrowserWindow, Menu } from 'electron';
|
||||
|
||||
export function setupMenu(win: BrowserWindow): void {
|
||||
const app = Menu.getApplicationMenu();
|
||||
Menu.setApplicationMenu(
|
||||
Menu.buildFromTemplate([
|
||||
...(app ? app.items : []),
|
||||
{
|
||||
label: 'Go',
|
||||
submenu: [
|
||||
{
|
||||
label: 'Back',
|
||||
accelerator: 'CmdOrCtrl+[',
|
||||
click: () => {
|
||||
win?.webContents.navigationHistory.goBack();
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Forward',
|
||||
accelerator: 'CmdOrCtrl+]',
|
||||
click: () => {
|
||||
win?.webContents.navigationHistory.goForward();
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
]),
|
||||
);
|
||||
}
|
51
electron/main/ui/window.ts
Normal file
@ -0,0 +1,51 @@
|
||||
import { app, BrowserWindow } from 'electron';
|
||||
import path from 'node:path';
|
||||
import { isDev } from '../utils/constants';
|
||||
import { store } from '../utils/store';
|
||||
|
||||
export function createWindow(rendererURL: string) {
|
||||
console.log('Creating window with URL:', rendererURL);
|
||||
|
||||
const bounds = store.get('bounds');
|
||||
console.log('restored bounds:', bounds);
|
||||
|
||||
const win = new BrowserWindow({
|
||||
...{
|
||||
width: 1200,
|
||||
height: 800,
|
||||
...bounds,
|
||||
},
|
||||
vibrancy: 'under-window',
|
||||
visualEffectState: 'active',
|
||||
webPreferences: {
|
||||
preload: path.join(app.getAppPath(), 'build', 'electron', 'preload', 'index.cjs'),
|
||||
},
|
||||
});
|
||||
|
||||
console.log('Window created, loading URL...');
|
||||
win.loadURL(rendererURL).catch((err) => {
|
||||
console.log('Failed to load URL:', err);
|
||||
});
|
||||
|
||||
win.webContents.on('did-fail-load', (_, errorCode, errorDescription) => {
|
||||
console.log('Failed to load:', errorCode, errorDescription);
|
||||
});
|
||||
|
||||
win.webContents.on('did-finish-load', () => {
|
||||
console.log('Window finished loading');
|
||||
});
|
||||
|
||||
// Open devtools in development
|
||||
if (isDev) {
|
||||
win.webContents.openDevTools();
|
||||
}
|
||||
|
||||
const boundsListener = () => {
|
||||
const bounds = win.getBounds();
|
||||
store.set('bounds', bounds);
|
||||
};
|
||||
win.on('moved', boundsListener);
|
||||
win.on('resized', boundsListener);
|
||||
|
||||
return win;
|
||||
}
|
110
electron/main/utils/auto-update.ts
Normal file
@ -0,0 +1,110 @@
|
||||
import logger from 'electron-log';
|
||||
import type { MessageBoxOptions } from 'electron';
|
||||
import { app, dialog } from 'electron';
|
||||
import type { AppUpdater, UpdateDownloadedEvent, UpdateInfo } from 'electron-updater';
|
||||
import path from 'node:path';
|
||||
|
||||
// NOTE: workaround to use electron-updater.
|
||||
import * as electronUpdater from 'electron-updater';
|
||||
import { isDev } from './constants';
|
||||
|
||||
const autoUpdater: AppUpdater = (electronUpdater as any).default.autoUpdater;
|
||||
|
||||
export async function setupAutoUpdater() {
|
||||
// Configure logger
|
||||
logger.transports.file.level = 'debug';
|
||||
autoUpdater.logger = logger;
|
||||
|
||||
// Configure custom update config file
|
||||
const resourcePath = isDev
|
||||
? path.join(process.cwd(), 'electron-update.yml')
|
||||
: path.join(app.getAppPath(), 'electron-update.yml');
|
||||
logger.info('Update config path:', resourcePath);
|
||||
autoUpdater.updateConfigPath = resourcePath;
|
||||
|
||||
// Disable auto download - we want to ask user first
|
||||
autoUpdater.autoDownload = false;
|
||||
autoUpdater.autoInstallOnAppQuit = true;
|
||||
|
||||
autoUpdater.on('checking-for-update', () => {
|
||||
logger.info('checking-for-update...');
|
||||
});
|
||||
|
||||
autoUpdater.on('update-available', async (info: UpdateInfo) => {
|
||||
logger.info('Update available.', info);
|
||||
|
||||
const dialogOpts: MessageBoxOptions = {
|
||||
type: 'info' as const,
|
||||
buttons: ['Update', 'Later'],
|
||||
title: 'Application Update',
|
||||
message: `Version ${info.version} is available.`,
|
||||
detail: 'A new version is available. Would you like to update now?',
|
||||
};
|
||||
|
||||
const response = await dialog.showMessageBox(dialogOpts);
|
||||
|
||||
if (response.response === 0) {
|
||||
autoUpdater.downloadUpdate();
|
||||
}
|
||||
});
|
||||
|
||||
autoUpdater.on('update-not-available', () => {
|
||||
logger.info('Update not available.');
|
||||
});
|
||||
|
||||
/*
|
||||
* Uncomment this before we have any published updates on github releases.
|
||||
* autoUpdater.on('error', (err) => {
|
||||
* logger.error('Error in auto-updater:', err);
|
||||
* dialog.showErrorBox('Error: ', err.message);
|
||||
* });
|
||||
*/
|
||||
|
||||
autoUpdater.on('download-progress', (progressObj) => {
|
||||
logger.info('Download progress:', progressObj);
|
||||
});
|
||||
|
||||
autoUpdater.on('update-downloaded', async (event: UpdateDownloadedEvent) => {
|
||||
logger.info('Update downloaded:', formatUpdateDownloadedEvent(event));
|
||||
|
||||
const dialogOpts: MessageBoxOptions = {
|
||||
type: 'info' as const,
|
||||
buttons: ['Restart', 'Later'],
|
||||
title: 'Application Update',
|
||||
message: 'Update Downloaded',
|
||||
detail: 'A new version has been downloaded. Restart the application to apply the updates.',
|
||||
};
|
||||
|
||||
const response = await dialog.showMessageBox(dialogOpts);
|
||||
|
||||
if (response.response === 0) {
|
||||
autoUpdater.quitAndInstall(false);
|
||||
}
|
||||
});
|
||||
|
||||
// Check for updates
|
||||
try {
|
||||
logger.info('Checking for updates. Current version:', app.getVersion());
|
||||
await autoUpdater.checkForUpdates();
|
||||
} catch (err) {
|
||||
logger.error('Failed to check for updates:', err);
|
||||
}
|
||||
|
||||
// Set up periodic update checks (every 4 hours)
|
||||
setInterval(
|
||||
() => {
|
||||
autoUpdater.checkForUpdates().catch((err) => {
|
||||
logger.error('Periodic update check failed:', err);
|
||||
});
|
||||
},
|
||||
4 * 60 * 60 * 1000,
|
||||
);
|
||||
}
|
||||
|
||||
function formatUpdateDownloadedEvent(event: UpdateDownloadedEvent): string {
|
||||
return JSON.stringify({
|
||||
version: event.version,
|
||||
downloadedFile: event.downloadedFile,
|
||||
files: event.files.map((e) => ({ files: { url: e.url, size: e.size } })),
|
||||
});
|
||||
}
|
4
electron/main/utils/constants.ts
Normal file
@ -0,0 +1,4 @@
import { app } from 'electron';

export const isDev = !(global.process.env.NODE_ENV === 'production' || app.isPackaged);
export const DEFAULT_PORT = 5173;
40
electron/main/utils/cookie.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { session } from 'electron';
|
||||
import { DEFAULT_PORT } from './constants';
|
||||
import { store } from './store';
|
||||
|
||||
/**
|
||||
* On app startup: read any cookies persisted in the store and set them on the Electron session.
|
||||
*/
|
||||
export async function initCookies() {
|
||||
await loadStoredCookies();
|
||||
}
|
||||
|
||||
// Function to store all cookies
|
||||
export async function storeCookies(cookies: Electron.Cookie[]) {
|
||||
for (const cookie of cookies) {
|
||||
store.set(`cookie:${cookie.name}`, cookie);
|
||||
}
|
||||
}
|
||||
|
||||
// Function to load stored cookies
|
||||
async function loadStoredCookies() {
|
||||
// Get all keys that start with 'cookie:'
|
||||
const cookieKeys = store.store ? Object.keys(store.store).filter((key) => key.startsWith('cookie:')) : [];
|
||||
|
||||
for (const key of cookieKeys) {
|
||||
const cookie = store.get(key);
|
||||
|
||||
if (cookie) {
|
||||
try {
|
||||
// Add default URL if not present
|
||||
const cookieWithUrl = {
|
||||
...cookie,
|
||||
url: cookie.url || `http://localhost:${DEFAULT_PORT}`,
|
||||
};
|
||||
await session.defaultSession.cookies.set(cookieWithUrl);
|
||||
} catch (error) {
|
||||
console.error(`Failed to set cookie ${key}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
35
electron/main/utils/reload.ts
Normal file
@ -0,0 +1,35 @@
|
||||
import { app } from 'electron';
|
||||
import path from 'node:path';
|
||||
import { promises as fs } from 'node:fs';
|
||||
|
||||
// Reload on change.
|
||||
let isQuited = false;
|
||||
|
||||
const abort = new AbortController();
|
||||
const { signal } = abort;
|
||||
|
||||
export async function reloadOnChange() {
|
||||
const dir = path.join(app.getAppPath(), 'build', 'electron');
|
||||
|
||||
try {
|
||||
const watcher = fs.watch(dir, { signal, recursive: true });
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
for await (const _event of watcher) {
|
||||
if (!isQuited) {
|
||||
isQuited = true;
|
||||
app.relaunch();
|
||||
app.quit();
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (!(err instanceof Error)) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (err.name === 'AbortError') {
|
||||
console.log('abort watching:', dir);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
71
electron/main/utils/serve.ts
Normal file
@ -0,0 +1,71 @@
|
||||
import { createReadableStreamFromReadable } from '@remix-run/node';
|
||||
import type { ServerBuild } from '@remix-run/node';
|
||||
import mime from 'mime';
|
||||
import { createReadStream, promises as fs } from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
import { app } from 'electron';
|
||||
import { isDev } from './constants';
|
||||
|
||||
export async function loadServerBuild(): Promise<any> {
|
||||
if (isDev) {
|
||||
console.log('Dev mode: server build not loaded');
|
||||
return;
|
||||
}
|
||||
|
||||
const serverBuildPath = path.join(app.getAppPath(), 'build', 'server', 'index.js');
|
||||
console.log(`Loading server build... path is ${serverBuildPath}`);
|
||||
|
||||
try {
|
||||
const fileUrl = pathToFileURL(serverBuildPath).href;
|
||||
const serverBuild: ServerBuild = /** @type {ServerBuild} */ await import(fileUrl);
|
||||
console.log('Server build loaded successfully');
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return serverBuild;
|
||||
} catch (buildError) {
|
||||
console.log('Failed to load server build:', {
|
||||
message: (buildError as Error)?.message,
|
||||
stack: (buildError as Error)?.stack,
|
||||
error: JSON.stringify(buildError, Object.getOwnPropertyNames(buildError as object)),
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// serve assets built by vite.
|
||||
export async function serveAsset(req: Request, assetsPath: string): Promise<Response | undefined> {
|
||||
const url = new URL(req.url);
|
||||
const fullPath = path.join(assetsPath, decodeURIComponent(url.pathname));
|
||||
console.log('Serving asset, path:', fullPath);
|
||||
|
||||
if (!fullPath.startsWith(assetsPath)) {
|
||||
console.log('Path is outside assets directory:', fullPath);
|
||||
return;
|
||||
}
|
||||
|
||||
const stat = await fs.stat(fullPath).catch((err) => {
|
||||
console.log('Failed to stat file:', fullPath, err);
|
||||
return undefined;
|
||||
});
|
||||
|
||||
if (!stat?.isFile()) {
|
||||
console.log('Not a file:', fullPath);
|
||||
return;
|
||||
}
|
||||
|
||||
const headers = new Headers();
|
||||
const mimeType = mime.getType(fullPath);
|
||||
|
||||
if (mimeType) {
|
||||
headers.set('Content-Type', mimeType);
|
||||
}
|
||||
|
||||
console.log('Serving file with mime type:', mimeType);
|
||||
|
||||
const body = createReadableStreamFromReadable(createReadStream(fullPath));
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new Response(body, { headers });
|
||||
}
|
3
electron/main/utils/store.ts
Normal file
@ -0,0 +1,3 @@
import ElectronStore from 'electron-store';

export const store = new ElectronStore<any>({ encryptionKey: 'something' });
44
electron/main/utils/vite-server.ts
Normal file
@ -0,0 +1,44 @@
|
||||
import { app } from 'electron';
|
||||
import type { ViteDevServer } from 'vite';
|
||||
|
||||
let viteServer: ViteDevServer | undefined;
|
||||
|
||||
// Conditionally import Vite only in development
|
||||
export async function initViteServer() {
|
||||
if (!(global.process.env.NODE_ENV === 'production' || app.isPackaged)) {
|
||||
const vite = await import('vite');
|
||||
viteServer = await vite.createServer({
|
||||
root: '.',
|
||||
envDir: process.cwd(), // load .env files from the root directory.
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
*
|
||||
* take care of vite-dev-server.
|
||||
*
|
||||
*/
|
||||
app.on('before-quit', async (_event) => {
|
||||
if (!viteServer) {
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* ref: https://stackoverflow.com/questions/68750716/electron-app-throwing-quit-unexpectedly-error-message-on-mac-when-quitting-the-a
|
||||
* event.preventDefault();
|
||||
*/
|
||||
try {
|
||||
console.log('will close vite-dev-server.');
|
||||
await viteServer.close();
|
||||
console.log('closed vite-dev-server.');
|
||||
|
||||
// app.quit(); // Not working. causes recursively 'before-quit' events.
|
||||
app.exit(); // Not working expectedly SOMETIMES. Still throws exception and macOS shows dialog.
|
||||
// global.process.exit(0); // Not working well... I still see exceptional dialog.
|
||||
} catch (err) {
|
||||
console.log('failed to close Vite server:', err);
|
||||
}
|
||||
});
|
||||
|
||||
export { viteServer };
|
44
electron/main/vite.config.ts
Normal file
@ -0,0 +1,44 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve('electron/main/index.ts'),
|
||||
formats: ['es'],
|
||||
},
|
||||
rollupOptions: {
|
||||
external: [
|
||||
'vite',
|
||||
'electron',
|
||||
...[
|
||||
'electron-log',
|
||||
|
||||
// electron-log uses fs internally
|
||||
'fs',
|
||||
'util',
|
||||
],
|
||||
|
||||
// Add all Node.js built-in modules as external
|
||||
'node:fs',
|
||||
'node:path',
|
||||
'node:url',
|
||||
'node:util',
|
||||
'node:stream',
|
||||
'node:events',
|
||||
'electron-store',
|
||||
'@remix-run/node',
|
||||
|
||||
// "mime", // NOTE: don't enable. not working if it's external.
|
||||
'electron-updater',
|
||||
],
|
||||
output: {
|
||||
dir: 'build/electron',
|
||||
entryFileNames: 'main/[name].mjs',
|
||||
format: 'esm',
|
||||
},
|
||||
},
|
||||
minify: false,
|
||||
emptyOutDir: false,
|
||||
},
|
||||
});
|
22
electron/preload/index.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import { ipcRenderer, contextBridge, type IpcRendererEvent } from 'electron';
|
||||
|
||||
console.debug('start preload.', ipcRenderer);
|
||||
|
||||
const ipc = {
|
||||
invoke(...args: any[]) {
|
||||
return ipcRenderer.invoke('ipcTest', ...args);
|
||||
},
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
|
||||
on(channel: string, func: Function) {
|
||||
const f = (event: IpcRendererEvent, ...args: any[]) => func(...[event, ...args]);
|
||||
console.debug('register listener', channel, f);
|
||||
ipcRenderer.on(channel, f);
|
||||
|
||||
return () => {
|
||||
console.debug('remove listener', channel, f);
|
||||
ipcRenderer.removeListener(channel, f);
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
contextBridge.exposeInMainWorld('ipc', ipc);
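From the renderer, the bridge registered above is available as window.ipc. The sketch below shows how renderer code could use it; the global type declaration is an assumption added for illustration, while the channel names match this commit (the main process sends periodic 'ping' messages and handles invocations on 'ipcTest').

// Illustrative renderer-side usage of the preload bridge.
declare global {
  interface Window {
    ipc: {
      invoke: (...args: unknown[]) => Promise<unknown>;
      on: (channel: string, listener: (...args: unknown[]) => void) => () => void;
    };
  }
}

// React to the periodic 'ping' message sent from the main process.
const unsubscribe = window.ipc.on('ping', (_event, message) => {
  console.log('main says:', message);
});

// Round-trip to the main process 'ipcTest' handler.
void window.ipc.invoke('hello from renderer');

// Remove the listener when it is no longer needed.
unsubscribe();

export {}; // keep this sketch a module so the global augmentation is valid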
|
7
electron/preload/tsconfig.json
Normal file
@ -0,0 +1,7 @@
{
  "extends": "../main/tsconfig.json",
  "include": ["./**/*.ts"],
  "compilerOptions": {
    "rootDir": "."
  }
}
31
electron/preload/vite.config.ts
Normal file
@ -0,0 +1,31 @@
|
||||
import { resolve } from 'path';
|
||||
import { defineConfig } from 'vite';
|
||||
|
||||
export default defineConfig({
|
||||
build: {
|
||||
lib: {
|
||||
entry: resolve('electron/preload/index.ts'),
|
||||
formats: ['cjs'],
|
||||
},
|
||||
rollupOptions: {
|
||||
external: ['electron'],
|
||||
output: {
|
||||
dir: 'build/electron',
|
||||
|
||||
/*
|
||||
* preload must be cjs format.
|
||||
* if mjs, it will be error:
|
||||
* - Unable to load preload script.
|
||||
* - SyntaxError: Cannot use import statement outside a module.
|
||||
*/
|
||||
entryFileNames: 'preload/[name].cjs',
|
||||
format: 'cjs',
|
||||
},
|
||||
},
|
||||
minify: false,
|
||||
emptyOutDir: false,
|
||||
},
|
||||
esbuild: {
|
||||
platform: 'node',
|
||||
},
|
||||
});
|
@ -39,7 +39,7 @@ export default [
|
||||
},
|
||||
{
|
||||
files: [...tsFileExtensions, ...jsFileExtensions, '**/*.tsx'],
|
||||
ignores: ['functions/*'],
|
||||
ignores: ['functions/*', 'electron/**/*'],
|
||||
rules: {
|
||||
'no-restricted-imports': [
|
||||
'error',
|
||||
|
31
notarize.cjs
Normal file
@ -0,0 +1,31 @@
|
||||
const { notarize } = require('@electron/notarize');
|
||||
|
||||
exports.default = async function notarizing(context) {
|
||||
const { electronPlatformName, appOutDir } = context;
|
||||
|
||||
if (electronPlatformName !== 'darwin') {
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip notarization when identity is null (development build)
|
||||
if (!context.packager.config.mac || context.packager.config.mac.identity === null) {
|
||||
console.log('Skipping notarization: identity is null');
|
||||
return;
|
||||
}
|
||||
|
||||
const appName = context.packager.appInfo.productFilename;
|
||||
const appBundleId = context.packager.config.appId;
|
||||
|
||||
try {
|
||||
console.log(`Notarizing ${appBundleId} found at ${appOutDir}/${appName}.app`);
|
||||
await notarize({
|
||||
tool: 'notarytool',
|
||||
appPath: `${appOutDir}/${appName}.app`,
|
||||
teamId: process.env.APPLE_TEAM_ID,
|
||||
});
|
||||
console.log(`Done notarizing ${appBundleId}`);
|
||||
} catch (error) {
|
||||
console.error('Notarization failed:', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
39
package.json
@ -6,6 +6,10 @@
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"version": "0.0.7",
|
||||
"author": {
|
||||
"name": "bolt.diy team",
|
||||
"email": "maintainers@bolt.diy"
|
||||
},
|
||||
"scripts": {
|
||||
"deploy": "npm run build && wrangler pages deploy",
|
||||
"build": "remix vite:build",
|
||||
@ -25,7 +29,20 @@
|
||||
"typegen": "wrangler types",
|
||||
"preview": "pnpm run build && pnpm run start",
|
||||
"prepare": "husky",
|
||||
"clean": "node scripts/clean.js"
|
||||
"clean": "node scripts/clean.js",
|
||||
"electron:dev": "pnpm electron:dev:main",
|
||||
"electron:dev:renderer": "cross-env NODE_ENV=development pnpm exec electron electron/dev-server.mjs",
|
||||
"electron:dev:main": "cross-env NODE_ENV=development pnpm run electron:build:deps && electron build/electron/main/index.mjs",
|
||||
"electron:build:start": "electron-builder start",
|
||||
"electron:build:deps": "concurrently \"pnpm electron:build:main\" \"pnpm electron:build:preload\" --kill-others-on-fail",
|
||||
"electron:build:main": "vite build --config ./electron/main/vite.config.ts",
|
||||
"electron:build:preload": "vite build --config ./electron/preload/vite.config.ts",
|
||||
"electron:build:renderer": "remix vite:build --config vite-electron.config.js",
|
||||
"electron:build:unpack": "rm -rf dist && pnpm electron:build:renderer && pnpm electron:build:deps && electron-builder --dir",
|
||||
"electron:build:mac": "rm -rf dist && pnpm electron:build:renderer && pnpm electron:build:deps && electron-builder --mac",
|
||||
"electron:build:win": "rm -rf dist && pnpm electron:build:renderer && pnpm electron:build:deps && electron-builder --win",
|
||||
"electron:build:linux": "rm -rf dist && pnpm electron:build:renderer && pnpm electron:build:deps && electron-builder --linux",
|
||||
"electron:build:dist": "rm -rf dist && pnpm electron:build:renderer && pnpm electron:build:deps && electron-builder --mwl"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.18.0"
|
||||
@ -94,6 +111,9 @@
|
||||
"date-fns": "^3.6.0",
|
||||
"diff": "^5.2.0",
|
||||
"dotenv": "^16.4.7",
|
||||
"electron-log": "^5.2.3",
|
||||
"electron-store": "^10.0.0",
|
||||
"electron-updater": "^6.3.9",
|
||||
"file-saver": "^2.0.5",
|
||||
"framer-motion": "^11.12.0",
|
||||
"ignore": "^6.0.2",
|
||||
@ -104,6 +124,7 @@
|
||||
"js-cookie": "^3.0.5",
|
||||
"jspdf": "^2.5.2",
|
||||
"jszip": "^3.10.1",
|
||||
"mime": "^4.0.4",
|
||||
"nanostores": "^0.10.3",
|
||||
"ollama-ai-provider": "^0.15.2",
|
||||
"path-browserify": "^1.0.1",
|
||||
@ -123,26 +144,36 @@
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remix-island": "^0.2.0",
|
||||
"remix-utils": "^7.7.0",
|
||||
"rollup-plugin-node-polyfills": "^0.2.1",
|
||||
"shiki": "^1.24.0",
|
||||
"tailwind-merge": "^2.2.1",
|
||||
"unist-util-visit": "^5.0.0",
|
||||
"vite-plugin-node-polyfills": "^0.22.0",
|
||||
"zod": "^3.24.1",
|
||||
"zustand": "^5.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@blitz/eslint-plugin": "0.1.0",
|
||||
"@cloudflare/workers-types": "^4.20241127.0",
|
||||
"@electron/notarize": "^2.5.0",
|
||||
"@iconify-json/ph": "^1.2.1",
|
||||
"@iconify/types": "^2.0.0",
|
||||
"@remix-run/dev": "^2.15.2",
|
||||
"@remix-run/serve": "^2.15.2",
|
||||
"@testing-library/jest-dom": "^6.6.3",
|
||||
"@testing-library/react": "^16.2.0",
|
||||
"@types/diff": "^5.2.3",
|
||||
"@types/dom-speech-recognition": "^0.0.4",
|
||||
"@types/electron": "^1.6.10",
|
||||
"@types/file-saver": "^2.0.7",
|
||||
"@types/js-cookie": "^3.0.6",
|
||||
"@types/path-browserify": "^1.0.3",
|
||||
"@types/react": "^18.3.12",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"concurrently": "^8.2.2",
|
||||
"cross-env": "^7.0.3",
|
||||
"electron": "^33.2.0",
|
||||
"electron-builder": "^25.1.8",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"husky": "9.1.7",
|
||||
@ -151,17 +182,17 @@
|
||||
"node-fetch": "^3.3.2",
|
||||
"pnpm": "^9.14.4",
|
||||
"prettier": "^3.4.1",
|
||||
"rimraf": "^4.4.1",
|
||||
"sass-embedded": "^1.81.0",
|
||||
"typescript": "^5.7.2",
|
||||
"unified": "^11.0.5",
|
||||
"unocss": "^0.61.9",
|
||||
"vite": "^5.4.11",
|
||||
"vite-plugin-node-polyfills": "^0.22.0",
|
||||
"vite-plugin-copy": "^0.1.6",
|
||||
"vite-plugin-optimize-css-modules": "^1.1.0",
|
||||
"vite-tsconfig-paths": "^4.3.2",
|
||||
"vitest": "^2.1.7",
|
||||
"wrangler": "^3.91.0",
|
||||
"zod": "^3.24.1"
|
||||
"wrangler": "^3.91.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"@typescript-eslint/utils": "^8.0.0-alpha.30"
|
||||
|
8039
pnpm-lock.yaml
generated
File diff suppressed because it is too large
@ -5,7 +5,8 @@
|
||||
"@remix-run/cloudflare",
|
||||
"vite/client",
|
||||
"@cloudflare/workers-types/2023-07-01",
|
||||
"@types/dom-speech-recognition"
|
||||
"@types/dom-speech-recognition",
|
||||
"electron"
|
||||
],
|
||||
"isolatedModules": true,
|
||||
"esModuleInterop": true,
|
||||
@ -23,7 +24,6 @@
|
||||
"paths": {
|
||||
"~/*": ["./app/*"]
|
||||
},
|
||||
|
||||
// vite takes care of building everything, not tsc
|
||||
"noEmit": true
|
||||
},
|
||||
|
75
vite-electron.config.ts
Normal file
@ -0,0 +1,75 @@
|
||||
import { defineConfig } from 'vite';
|
||||
import { vitePlugin as remixVitePlugin } from '@remix-run/dev';
|
||||
import UnoCSS from 'unocss/vite';
|
||||
import { nodePolyfills } from 'vite-plugin-node-polyfills';
|
||||
import { optimizeCssModules } from 'vite-plugin-optimize-css-modules';
|
||||
import tsconfigPaths from 'vite-tsconfig-paths';
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
|
||||
// Get git hash with fallback
|
||||
const getGitHash = () => {
|
||||
try {
|
||||
return execSync('git rev-parse --short HEAD').toString().trim();
|
||||
} catch {
|
||||
return 'no-git-info';
|
||||
}
|
||||
};
|
||||
|
||||
export default defineConfig((config) => {
|
||||
return {
|
||||
define: {
|
||||
__COMMIT_HASH: JSON.stringify(getGitHash()),
|
||||
__APP_VERSION: JSON.stringify(process.env.npm_package_version),
|
||||
},
|
||||
build: {
|
||||
target: 'esnext',
|
||||
},
|
||||
plugins: [
|
||||
nodePolyfills({
|
||||
include: ['path', 'buffer', 'process'],
|
||||
}),
|
||||
remixVitePlugin({
|
||||
future: {
|
||||
v3_fetcherPersist: true,
|
||||
v3_relativeSplatPath: true,
|
||||
v3_throwAbortReason: true,
|
||||
v3_lazyRouteDiscovery: true,
|
||||
},
|
||||
serverModuleFormat: 'esm',
|
||||
}),
|
||||
UnoCSS(),
|
||||
tsconfigPaths(),
|
||||
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
|
||||
{
|
||||
name: 'replaceReactDomServerImport',
|
||||
enforce: 'pre',
|
||||
transform(code, id) {
|
||||
if (id.endsWith('entry.server.tsx')) {
|
||||
/*
|
||||
* Hack: fix the issue with react-dom/server not being found in electron
|
||||
* Replace the import from 'react-dom/server' with 'react-dom/server.browser', only for electron build
|
||||
*/
|
||||
return code.replace(/from 'react-dom\/server';?/g, "from 'react-dom/server.browser';");
|
||||
}
|
||||
|
||||
return undefined;
|
||||
},
|
||||
},
|
||||
],
|
||||
envPrefix: [
|
||||
'VITE_',
|
||||
'OPENAI_LIKE_API_BASE_URL',
|
||||
'OLLAMA_API_BASE_URL',
|
||||
'LMSTUDIO_API_BASE_URL',
|
||||
'TOGETHER_API_BASE_URL',
|
||||
],
|
||||
css: {
|
||||
preprocessorOptions: {
|
||||
scss: {
|
||||
api: 'modern-compiler',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
@ -89,14 +89,55 @@ export default defineConfig((config) => {
|
||||
__PKG_DEV_DEPENDENCIES: JSON.stringify(pkg.devDependencies),
|
||||
__PKG_PEER_DEPENDENCIES: JSON.stringify(pkg.peerDependencies),
|
||||
__PKG_OPTIONAL_DEPENDENCIES: JSON.stringify(pkg.optionalDependencies),
|
||||
// Define global values
|
||||
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV),
|
||||
},
|
||||
build: {
|
||||
target: 'esnext',
|
||||
rollupOptions: {
|
||||
output: {
|
||||
format: 'esm',
|
||||
},
|
||||
},
|
||||
commonjsOptions: {
|
||||
transformMixedEsModules: true,
|
||||
},
|
||||
},
|
||||
optimizeDeps: {
|
||||
esbuildOptions: {
|
||||
define: {
|
||||
global: 'globalThis',
|
||||
},
|
||||
},
|
||||
},
|
||||
resolve: {
|
||||
alias: {
|
||||
buffer: 'vite-plugin-node-polyfills/polyfills/buffer',
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
nodePolyfills({
|
||||
include: ['path', 'buffer', 'process'],
|
||||
include: ['buffer', 'process', 'util', 'stream'],
|
||||
globals: {
|
||||
Buffer: true,
|
||||
process: true,
|
||||
global: true,
|
||||
},
|
||||
protocolImports: true,
|
||||
// Exclude Node.js modules that shouldn't be polyfilled in Cloudflare
|
||||
exclude: ['child_process', 'fs', 'path'],
|
||||
}),
|
||||
{
|
||||
name: 'buffer-polyfill',
|
||||
transform(code, id) {
|
||||
if (id.includes('env.mjs')) {
|
||||
return {
|
||||
code: `import { Buffer } from 'buffer';\n${code}`,
|
||||
map: null,
|
||||
};
|
||||
}
|
||||
},
|
||||
},
|
||||
config.mode !== 'test' && remixCloudflareDevProxy(),
|
||||
remixVitePlugin({
|
||||
future: {
|
||||