Merge pull request #26 from wtlyu/master

support config host name and proxy address. and fix a docker bug
This commit is contained in:
Danny Avila 2023-03-11 12:05:16 -05:00 committed by GitHub
commit 8624062488
17 changed files with 130 additions and 67 deletions

View file

@ -55,21 +55,27 @@ Currently, this project is only functional with the `text-davinci-003` model.
</details> </details>
# Table of Contents # Table of Contents
* [Roadmap](#roadmap) - [ChatGPT Clone](#chatgpt-clone)
* [Features](#features) - [All AI Conversations under One Roof.](#all-ai-conversations-under-one-roof)
* [Tech Stack](#tech-stack) - [Updates](#updates)
* [Getting Started](#getting-started) - [Table of Contents](#table-of-contents)
* [Prerequisites](#prerequisites) - [Roadmap](#roadmap)
* [Usage](#usage) - [Features](#features)
* [Local (npm)](#npm) - [Tech Stack](#tech-stack)
* [Docker](#docker) - [Getting Started](#getting-started)
* [Access Tokens](#access-tokens) - [Prerequisites](#prerequisites)
* [Updating](#updating) - [Usage](#usage)
* [Use Cases](#use-cases) - [Local](#local)
* [Origin](#origin) - [Docker](#docker)
* [Caveats](#caveats) - [Access Tokens](#access-tokens)
* [Contributing](#contributing) - [Proxy](#proxy)
* [License](#license) - [Updating](#updating)
- [Use Cases](#use-cases)
- [Origin](#origin)
- [Caveats](#caveats)
- [Regarding use of Official ChatGPT API](#regarding-use-of-official-chatgpt-api)
- [Contributing](#contributing)
- [License](#license)
## Roadmap ## Roadmap
@ -143,6 +149,8 @@ Here are my recently completed and planned features:
- **Run** `npm run build` in /client/ dir, `npm start` in /api/ dir - **Run** `npm run build` in /client/ dir, `npm start` in /api/ dir
- **Visit** http://localhost:3080 (default port) & enjoy - **Visit** http://localhost:3080 (default port) & enjoy
By default, only the local machine can access this server. To share it within your network or serve it as a public server, set `HOST` to `0.0.0.0` in the `.env` file
### Docker ### Docker
- **Provide** all credentials, (API keys, access tokens, and Mongo Connection String) in [docker-compose.yml](docker-compose.yml) under api service - **Provide** all credentials, (API keys, access tokens, and Mongo Connection String) in [docker-compose.yml](docker-compose.yml) under api service
@ -180,6 +188,42 @@ The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an exte
**Note:** Specific error handling and styling for this model is still in progress. **Note:** Specific error handling and styling for this model is still in progress.
</details> </details>
### Proxy
If your server cannot connect to the chatGPT API server for some reason (e.g., in China), you can set the environment variable `PROXY`. It will be passed through to the `node-chatgpt-api` interface.
**Warning:** `PROXY` is not `reverseProxyUrl` in `node-chatgpt-api`
<details>
<summary><strong>Set up proxy in local environment </strong></summary>
Here are two ways to set the proxy.
- Option 1: system level environment
`export PROXY="http://127.0.0.1:7890"`
- Option 2: set in .env file
`PROXY="http://127.0.0.1:7890"`
**Change `http://127.0.0.1:7890` to your proxy server**
</details>
<details>
<summary><strong>Set up proxy in docker environment </strong></summary>
Set it in the docker-compose.yml file, under services → api → environment
```
api:
...
environment:
...
- "PROXY=http://127.0.0.1:7890"
# add this line ↑
```
**Change `http://127.0.0.1:7890` to your proxy server**
</details>
### Updating ### Updating
- As the project is still a work-in-progress, you should pull the latest and run the steps over. Reset your browser cache/clear site data. - As the project is still a work-in-progress, you should pull the latest and run the steps over. Reset your browser cache/clear site data.

View file

@ -1,13 +1,22 @@
OPENAI_KEY= # Server configuration.
HOST= # The server will listen for localhost:3080 requests by default. You can set the target ip as you want.
# If you want this server to be reachable from outside your local machine, for example to share it with other
# machines or to expose it from a docker container, set HOST=0.0.0.0 or your external ip interface.
#
# Tip: HOST=0.0.0.0 means listening on all interfaces. It's not a real ip. Use localhost:port rather
# than 0.0.0.0:port to open it.
HOST=localhost
PORT=3080 PORT=3080
NODE_ENV=development NODE_ENV=development
# Change this to proxy any API request. It's useful if your machine has difficulty calling the original API server.
# PROXY="http://YOUR_PROXY_SERVER"
# Change this to your MongoDB URI if different and I recommend appending chatgpt-clone # Change this to your MongoDB URI if different and I recommend appending chatgpt-clone
MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone" MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
# Change this to proxy any request. # API key configuration.
PROXY= # Leave blank if you don't want them.
OPENAI_KEY=
CHATGPT_TOKEN="" CHATGPT_TOKEN=
BING_TOKEN="" BING_TOKEN=

View file

@ -8,6 +8,8 @@ RUN npm install
COPY . /api/ COPY . /api/
# Make port 3080 available to the world outside this container # Make port 3080 available to the world outside this container
EXPOSE 3080 EXPOSE 3080
# Expose the server to 0.0.0.0
ENV HOST=0.0.0.0
# Run the app when the container launches # Run the app when the container launches
CMD ["npm", "start"] CMD ["npm", "start"]

View file

@ -10,7 +10,8 @@ const askBing = async ({ text, progressCallback, convo }) => {
// If the above doesn't work, provide all your cookies as a string instead // If the above doesn't work, provide all your cookies as a string instead
// cookies: '', // cookies: '',
debug: false, debug: false,
cache: { store: new KeyvFile({ filename: './data/cache.json' }) } cache: { store: new KeyvFile({ filename: './data/cache.json' }) },
proxy: process.env.PROXY || null,
}); });
let options = { let options = {

View file

@ -7,6 +7,7 @@ const clientOptions = {
// Access token from https://chat.openai.com/api/auth/session // Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN, accessToken: process.env.CHATGPT_TOKEN,
// debug: true // debug: true
proxy: process.env.PROXY || null,
}; };
const browserClient = async ({ text, progressCallback, convo }) => { const browserClient = async ({ text, progressCallback, convo }) => {

View file

@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: { modelOptions: {
model: 'gpt-3.5-turbo' model: 'gpt-3.5-turbo'
}, },
proxy: process.env.PROXY || null,
debug: false debug: false
}; };

View file

@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: { modelOptions: {
model: 'gpt-3.5-turbo' model: 'gpt-3.5-turbo'
}, },
proxy: process.env.PROXY || null,
debug: false debug: false
}; };

View file

@ -1,28 +1,32 @@
const express = require('express'); const express = require('express');
const dbConnect = require('../models/dbConnect'); const dbConnect = require('../models/dbConnect');
const path = require('path'); const path = require('path');
const cors = require('cors'); const cors = require('cors');
const routes = require('./routes'); const routes = require('./routes');
const app = express(); const app = express();
const port = process.env.PORT || 3080; const port = process.env.PORT || 3080;
const projectPath = path.join(__dirname, '..', '..', 'client'); const host = process.env.HOST || 'localhost'
dbConnect().then(() => console.log('Connected to MongoDB')); const projectPath = path.join(__dirname, '..', '..', 'client');
dbConnect().then(() => console.log('Connected to MongoDB'));
app.use(cors());
app.use(express.json()); app.use(cors());
app.use(express.static(path.join(projectPath, 'public'))); app.use(express.json());
app.use(express.static(path.join(projectPath, 'public')));
app.get('/', function (req, res) {
console.log(path.join(projectPath, 'public', 'index.html')); app.get('/', function (req, res) {
res.sendFile(path.join(projectPath, 'public', 'index.html')); console.log(path.join(projectPath, 'public', 'index.html'));
}); res.sendFile(path.join(projectPath, 'public', 'index.html'));
});
app.use('/api/ask', routes.ask);
app.use('/api/messages', routes.messages); app.use('/api/ask', routes.ask);
app.use('/api/convos', routes.convos); app.use('/api/messages', routes.messages);
app.use('/api/customGpts', routes.customGpts); app.use('/api/convos', routes.convos);
app.use('/api/prompts', routes.prompts); app.use('/api/customGpts', routes.customGpts);
app.use('/api/prompts', routes.prompts);
app.listen(port, () => {
console.log(`Server listening at http://localhost:${port}`); app.listen(port, host, () => {
}); if (host=='0.0.0.0')
console.log(`Server listening on all interface at port ${port}. Use http://localhost:${port} to access it`);
else
console.log(`Server listening at http://${host=='0.0.0.0'?'localhost':host}:${port}`);
});

View file

@ -23,8 +23,8 @@ export default function Conversation({
const { modelMap } = useSelector((state) => state.models); const { modelMap } = useSelector((state) => state.models);
const inputRef = useRef(null); const inputRef = useRef(null);
const dispatch = useDispatch(); const dispatch = useDispatch();
const { trigger } = manualSWR(`http://localhost:3080/api/messages/${id}`, 'get'); const { trigger } = manualSWR(`/api/messages/${id}`, 'get');
const rename = manualSWR(`http://localhost:3080/api/convos/update`, 'post'); const rename = manualSWR(`/api/convos/update`, 'post');
const clickHandler = async () => { const clickHandler = async () => {
if (conversationId === id) { if (conversationId === id) {

View file

@ -9,7 +9,7 @@ import { setMessages } from '~/store/messageSlice';
export default function DeleteButton({ conversationId, renaming, cancelHandler }) { export default function DeleteButton({ conversationId, renaming, cancelHandler }) {
const dispatch = useDispatch(); const dispatch = useDispatch();
const { trigger } = manualSWR( const { trigger } = manualSWR(
`http://localhost:3080/api/convos/clear`, `/api/convos/clear`,
'post', 'post',
() => { () => {
dispatch(setMessages([])); dispatch(setMessages([]));

View file

@ -25,7 +25,7 @@ export default function ModelDialog({ mutate, setModelSave, handleSaveState }) {
const [saveText, setSaveText] = useState('Save'); const [saveText, setSaveText] = useState('Save');
const [required, setRequired] = useState(false); const [required, setRequired] = useState(false);
const inputRef = useRef(null); const inputRef = useRef(null);
const updateCustomGpt = manualSWR(`http://localhost:3080/api/customGpts/`, 'post'); const updateCustomGpt = manualSWR(`/api/customGpts/`, 'post');
const submitHandler = (e) => { const submitHandler = (e) => {
if (chatGptLabel.length === 0) { if (chatGptLabel.length === 0) {

View file

@ -15,8 +15,8 @@ export default function ModelItem({ modelName, value, onSelect }) {
const [currentName, setCurrentName] = useState(modelName); const [currentName, setCurrentName] = useState(modelName);
const [modelInput, setModelInput] = useState(modelName); const [modelInput, setModelInput] = useState(modelName);
const inputRef = useRef(null); const inputRef = useRef(null);
const rename = manualSWR(`http://localhost:3080/api/customGpts`, 'post'); const rename = manualSWR(`/api/customGpts`, 'post');
const deleteCustom = manualSWR(`http://localhost:3080/api/customGpts/delete`, 'post'); const deleteCustom = manualSWR(`/api/customGpts/delete`, 'post');
if (value === 'chatgptCustom') { if (value === 'chatgptCustom') {
return ( return (

View file

@ -27,7 +27,7 @@ export default function ModelMenu() {
const [menuOpen, setMenuOpen] = useState(false); const [menuOpen, setMenuOpen] = useState(false);
const { model, customModel } = useSelector((state) => state.submit); const { model, customModel } = useSelector((state) => state.submit);
const { models, modelMap, initial } = useSelector((state) => state.models); const { models, modelMap, initial } = useSelector((state) => state.models);
const { trigger } = manualSWR(`http://localhost:3080/api/customGpts`, 'get', (res) => { const { trigger } = manualSWR(`/api/customGpts`, 'get', (res) => {
const fetchedModels = res.map((modelItem) => ({ const fetchedModels = res.map((modelItem) => ({
...modelItem, ...modelItem,
name: modelItem.chatGptLabel name: modelItem.chatGptLabel

View file

@ -10,10 +10,10 @@ export default function ClearConvos() {
const dispatch = useDispatch(); const dispatch = useDispatch();
const { mutate } = useSWRConfig(); const { mutate } = useSWRConfig();
const { trigger } = manualSWR(`http://localhost:3080/api/convos/clear`, 'post', () => { const { trigger } = manualSWR(`/api/convos/clear`, 'post', () => {
dispatch(setMessages([])); dispatch(setMessages([]));
dispatch(setNewConvo()); dispatch(setNewConvo());
mutate(`http://localhost:3080/api/convos`); mutate(`/api/convos`);
}); });
const clickHandler = () => { const clickHandler = () => {

View file

@ -17,7 +17,7 @@ export default function Nav() {
}; };
const { data, isLoading, mutate } = swr( const { data, isLoading, mutate } = swr(
`http://localhost:3080/api/convos?pageNumber=${pageNumber}`, `/api/convos?pageNumber=${pageNumber}`,
onSuccess onSuccess
); );
const containerRef = useRef(null); const containerRef = useRef(null);

View file

@ -11,7 +11,7 @@ export default function handleSubmit({
chatGptLabel, chatGptLabel,
promptPrefix promptPrefix
}) { }) {
const endpoint = `http://localhost:3080/api/ask`; const endpoint = `/api/ask`;
let payload = { model, text, chatGptLabel, promptPrefix }; let payload = { model, text, chatGptLabel, promptPrefix };
if (convo.conversationId && convo.parentMessageId) { if (convo.conversationId && convo.parentMessageId) {
payload = { payload = {

View file

@ -17,11 +17,11 @@ services:
image: node-api image: node-api
restart: always restart: always
environment: environment:
- PORT=3080 - "PORT=3080"
- MONGO_URI=mongodb://mongodb:27017/chatgpt-clone - "MONGO_URI=mongodb://mongodb:27017/chatgpt-clone"
- OPENAI_KEY="" - "OPENAI_KEY="
- CHATGPT_TOKEN="" - "CHATGPT_TOKEN="
- BING_TOKEN="" - "BING_TOKEN="
ports: ports:
- "9000:3080" - "9000:3080"
volumes: volumes: