Merge pull request #26 from wtlyu/master

Support configuring the host name and proxy address, and fix a Docker bug
This commit is contained in:
Danny Avila 2023-03-11 12:05:16 -05:00 committed by GitHub
commit 8624062488
17 changed files with 130 additions and 67 deletions


@@ -55,21 +55,27 @@ Currently, this project is only functional with the `text-davinci-003` model.
</details>
# Table of Contents
* [Roadmap](#roadmap)
* [Features](#features)
* [Tech Stack](#tech-stack)
* [Getting Started](#getting-started)
* [Prerequisites](#prerequisites)
* [Usage](#usage)
* [Local (npm)](#npm)
* [Docker](#docker)
* [Access Tokens](#access-tokens)
* [Updating](#updating)
* [Use Cases](#use-cases)
* [Origin](#origin)
* [Caveats](#caveats)
* [Contributing](#contributing)
* [License](#license)
- [ChatGPT Clone](#chatgpt-clone)
- [All AI Conversations under One Roof.](#all-ai-conversations-under-one-roof)
- [Updates](#updates)
- [Table of Contents](#table-of-contents)
- [Roadmap](#roadmap)
- [Features](#features)
- [Tech Stack](#tech-stack)
- [Getting Started](#getting-started)
- [Prerequisites](#prerequisites)
- [Usage](#usage)
- [Local](#local)
- [Docker](#docker)
- [Access Tokens](#access-tokens)
- [Proxy](#proxy)
- [Updating](#updating)
- [Use Cases](#use-cases)
- [Origin](#origin)
- [Caveats](#caveats)
- [Regarding use of Official ChatGPT API](#regarding-use-of-official-chatgpt-api)
- [Contributing](#contributing)
- [License](#license)
## Roadmap
@@ -143,6 +149,8 @@ Here are my recently completed and planned features:
- **Run** `npm run build` in /client/ dir, `npm start` in /api/ dir
- **Visit** http://localhost:3080 (default port) & enjoy
By default, only the local machine can access this server. To share it within your network or serve it as a public server, set `HOST` to `0.0.0.0` in the `.env` file.
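For example, a minimal `.env` sketch for serving on your local network (values are examples; keep `HOST=localhost` if you only need local access):
```
HOST=0.0.0.0
PORT=3080
```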
### Docker
- **Provide** all credentials (API keys, access tokens, and the Mongo connection string) in [docker-compose.yml](docker-compose.yml) under the api service, as sketched below
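A sketch of the relevant fragment (the key names match this repo's compose file; the values are placeholders you must fill in):
```
services:
  api:
    environment:
      - "OPENAI_KEY=your-openai-key"
      - "CHATGPT_TOKEN=your-chatgpt-access-token"
      - "BING_TOKEN=your-bing-_U-cookie"
      - "MONGO_URI=mongodb://mongodb:27017/chatgpt-clone"
```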
@@ -180,6 +188,42 @@ The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an exte
**Note:** Specific error handling and styling for this model are still in progress.
</details>
### Proxy
If your server cannot connect to the ChatGPT API server for some reason (e.g., in China), you can set the environment variable `PROXY`. Its value will be passed through to the `node-chatgpt-api` interface.
**Warning:** `PROXY` is not the same as `reverseProxyUrl` in `node-chatgpt-api`.
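A rough sketch of the distinction, using the client options as they appear in this repo (the `reverseProxyUrl` line is illustrative only and normally stays unset):
```
const clientOptions = {
  // PROXY sets the outbound proxy used to reach the API server:
  proxy: process.env.PROXY || null, // e.g. "http://127.0.0.1:7890"
  // reverseProxyUrl is a separate node-chatgpt-api option that swaps out the API
  // endpoint itself; it is NOT populated from the PROXY variable.
  // reverseProxyUrl: 'https://your-reverse-proxy.example/api/conversation',
};
```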
<details>
<summary><strong>Set up the proxy in a local environment</strong></summary>
There are two ways to set the proxy:
- Option 1: a system-level environment variable
`export PROXY="http://127.0.0.1:7890"`
- Option 2: set it in the `.env` file
`PROXY="http://127.0.0.1:7890"`
**Change `http://127.0.0.1:7890` to your proxy server's address**
</details>
<details>
<summary><strong>Set up the proxy in a Docker environment</strong></summary>
Set it in the docker-compose.yml file, under `services` → `api` → `environment`:
```
api:
...
environment:
...
- "PROXY=http://127.0.0.1:7890"
# add this line ↑
```
**Change `http://127.0.0.1:7890` to your proxy server's address**
</details>
### Updating
- As the project is still a work in progress, you should pull the latest changes and run the setup steps again (see the sketch below). Also reset your browser cache / clear site data.
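A typical update sketch, assuming you cloned the repository with git (the commands simply repeat the setup steps above):
```
# pull the latest changes, then rebuild the client and restart the api
git pull
cd client && npm install && npm run build
cd ../api && npm install && npm start
```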


@@ -1,13 +1,22 @@
OPENAI_KEY=
HOST=
# Server configuration.
# The server listens on localhost:3080 by default. You can set the target IP as you want.
# If you want this server to be reachable from outside your local machine, for example to share it with
# another machine or expose it from a docker container, set HOST=0.0.0.0 or your external IP interface.
#
# Tip: HOST=0.0.0.0 means listening on all interfaces. It is not a real IP; use localhost:port rather
# than 0.0.0.0:port to open it in a browser.
HOST=localhost
PORT=3080
NODE_ENV=development
# Change this to proxy any API request. It's useful if your machine has difficulty calling the original API server.
# PROXY="http://YOUR_PROXY_SERVER"
# Change this to your MongoDB URI if different. I recommend appending chatgpt-clone as the database name.
MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
# Change this to proxy any request.
PROXY=
CHATGPT_TOKEN=""
BING_TOKEN=""
# API key configuration.
# Leave blank any keys you don't want to use.
OPENAI_KEY=
CHATGPT_TOKEN=
BING_TOKEN=


@@ -8,6 +8,8 @@ RUN npm install
COPY . /api/
# Make port 3080 available to the world outside this container
EXPOSE 3080
# Bind the server to 0.0.0.0 so it is reachable from outside the container
ENV HOST=0.0.0.0
# Run the app when the container launches
CMD ["npm", "start"]


@@ -10,7 +10,8 @@ const askBing = async ({ text, progressCallback, convo }) => {
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,
cache: { store: new KeyvFile({ filename: './data/cache.json' }) }
cache: { store: new KeyvFile({ filename: './data/cache.json' }) },
proxy: process.env.PROXY || null,
});
let options = {


@@ -7,6 +7,7 @@ const clientOptions = {
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
// debug: true
proxy: process.env.PROXY || null,
};
const browserClient = async ({ text, progressCallback, convo }) => {


@@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};


@@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};


@@ -5,6 +5,7 @@ const cors = require('cors');
const routes = require('./routes');
const app = express();
const port = process.env.PORT || 3080;
const host = process.env.HOST || 'localhost';
const projectPath = path.join(__dirname, '..', '..', 'client');
dbConnect().then(() => console.log('Connected to MongoDB'));
@@ -23,6 +24,9 @@ app.use('/api/convos', routes.convos);
app.use('/api/customGpts', routes.customGpts);
app.use('/api/prompts', routes.prompts);
app.listen(port, () => {
console.log(`Server listening at http://localhost:${port}`);
app.listen(port, host, () => {
if (host == '0.0.0.0')
console.log(`Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it.`);
else
console.log(`Server listening at http://${host}:${port}`);
});


@@ -23,8 +23,8 @@ export default function Conversation({
const { modelMap } = useSelector((state) => state.models);
const inputRef = useRef(null);
const dispatch = useDispatch();
const { trigger } = manualSWR(`http://localhost:3080/api/messages/${id}`, 'get');
const rename = manualSWR(`http://localhost:3080/api/convos/update`, 'post');
const { trigger } = manualSWR(`/api/messages/${id}`, 'get');
const rename = manualSWR(`/api/convos/update`, 'post');
const clickHandler = async () => {
if (conversationId === id) {


@@ -9,7 +9,7 @@ import { setMessages } from '~/store/messageSlice';
export default function DeleteButton({ conversationId, renaming, cancelHandler }) {
const dispatch = useDispatch();
const { trigger } = manualSWR(
`http://localhost:3080/api/convos/clear`,
`/api/convos/clear`,
'post',
() => {
dispatch(setMessages([]));


@@ -25,7 +25,7 @@ export default function ModelDialog({ mutate, setModelSave, handleSaveState }) {
const [saveText, setSaveText] = useState('Save');
const [required, setRequired] = useState(false);
const inputRef = useRef(null);
const updateCustomGpt = manualSWR(`http://localhost:3080/api/customGpts/`, 'post');
const updateCustomGpt = manualSWR(`/api/customGpts/`, 'post');
const submitHandler = (e) => {
if (chatGptLabel.length === 0) {


@@ -15,8 +15,8 @@ export default function ModelItem({ modelName, value, onSelect }) {
const [currentName, setCurrentName] = useState(modelName);
const [modelInput, setModelInput] = useState(modelName);
const inputRef = useRef(null);
const rename = manualSWR(`http://localhost:3080/api/customGpts`, 'post');
const deleteCustom = manualSWR(`http://localhost:3080/api/customGpts/delete`, 'post');
const rename = manualSWR(`/api/customGpts`, 'post');
const deleteCustom = manualSWR(`/api/customGpts/delete`, 'post');
if (value === 'chatgptCustom') {
return (


@@ -27,7 +27,7 @@ export default function ModelMenu() {
const [menuOpen, setMenuOpen] = useState(false);
const { model, customModel } = useSelector((state) => state.submit);
const { models, modelMap, initial } = useSelector((state) => state.models);
const { trigger } = manualSWR(`http://localhost:3080/api/customGpts`, 'get', (res) => {
const { trigger } = manualSWR(`/api/customGpts`, 'get', (res) => {
const fetchedModels = res.map((modelItem) => ({
...modelItem,
name: modelItem.chatGptLabel


@@ -10,10 +10,10 @@ export default function ClearConvos() {
const dispatch = useDispatch();
const { mutate } = useSWRConfig();
const { trigger } = manualSWR(`http://localhost:3080/api/convos/clear`, 'post', () => {
const { trigger } = manualSWR(`/api/convos/clear`, 'post', () => {
dispatch(setMessages([]));
dispatch(setNewConvo());
mutate(`http://localhost:3080/api/convos`);
mutate(`/api/convos`);
});
const clickHandler = () => {


@@ -17,7 +17,7 @@ export default function Nav() {
};
const { data, isLoading, mutate } = swr(
`http://localhost:3080/api/convos?pageNumber=${pageNumber}`,
`/api/convos?pageNumber=${pageNumber}`,
onSuccess
);
const containerRef = useRef(null);


@@ -11,7 +11,7 @@ export default function handleSubmit({
chatGptLabel,
promptPrefix
}) {
const endpoint = `http://localhost:3080/api/ask`;
const endpoint = `/api/ask`;
let payload = { model, text, chatGptLabel, promptPrefix };
if (convo.conversationId && convo.parentMessageId) {
payload = {


@@ -17,11 +17,11 @@ services:
image: node-api
restart: always
environment:
- PORT=3080
- MONGO_URI=mongodb://mongodb:27017/chatgpt-clone
- OPENAI_KEY=""
- CHATGPT_TOKEN=""
- BING_TOKEN=""
- "PORT=3080"
- "MONGO_URI=mongodb://mongodb:27017/chatgpt-clone"
- "OPENAI_KEY="
- "CHATGPT_TOKEN="
- "BING_TOKEN="
ports:
- "9000:3080"
volumes: