Merge pull request #26 from wtlyu/master

Support configuring the host name and proxy address, and fix a Docker bug
Danny Avila 2023-03-11 12:05:16 -05:00 committed by GitHub
commit 8624062488
17 changed files with 130 additions and 67 deletions

View file

@@ -1,13 +1,22 @@
OPENAI_KEY=
HOST=
# Server configuration.
# The server listens on localhost:3080 by default. You can set the target IP as you want.
# If you want this server to be reachable from outside your local machine, for example to share it with
# another machine or to expose it from a Docker container, set HOST=0.0.0.0 or the IP of your external interface.
#
# Tip: HOST=0.0.0.0 means listening on all interfaces; it is not a real IP. Use localhost:port rather
# than 0.0.0.0:port to open it in a browser.
HOST=localhost
PORT=3080
NODE_ENV=development
# Change this to proxy any API request. It's useful if your machine has difficulty reaching the original API server.
# PROXY="http://YOUR_PROXY_SERVER"
# Change this to your MongoDB URI if it differs; appending the database name (chatgpt-clone) is recommended.
MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
# Change this to proxy any request.
PROXY=
CHATGPT_TOKEN=""
BING_TOKEN=""
# API key configuration.
# Leave blank if you don't want them.
OPENAI_KEY=
CHATGPT_TOKEN=
BING_TOKEN=
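
For context, a setup that needs to be reachable from other machines or from inside Docker might use values like the following (illustrative examples, not part of this commit):

HOST=0.0.0.0
PORT=3080
PROXY="http://127.0.0.1:7890"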

View file

@@ -8,6 +8,8 @@ RUN npm install
COPY . /api/
# Make port 3080 available to the world outside this container
EXPOSE 3080
# Bind the server to all interfaces so it is reachable from outside the container
ENV HOST=0.0.0.0
# Run the app when the container launches
CMD ["npm", "start"]

View file

@@ -10,7 +10,8 @@ const askBing = async ({ text, progressCallback, convo }) => {
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,
cache: { store: new KeyvFile({ filename: './data/cache.json' }) }
cache: { store: new KeyvFile({ filename: './data/cache.json' }) },
proxy: process.env.PROXY || null,
});
let options = {

View file

@@ -7,6 +7,7 @@ const clientOptions = {
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
// debug: true
proxy: process.env.PROXY || null,
};
const browserClient = async ({ text, progressCallback, convo }) => {

View file

@@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};

View file

@@ -5,6 +5,7 @@ const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};
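
The proxy option itself is consumed inside the client library; purely as a sketch of the underlying idea (assuming the https-proxy-agent package, which is not used in this commit), routing an outgoing request through PROXY looks roughly like this:

const https = require('https');
// Assumes https-proxy-agent v7+, which provides HttpsProxyAgent as a named export.
const { HttpsProxyAgent } = require('https-proxy-agent');

const proxy = process.env.PROXY || null;
// Fall back to a direct connection when PROXY is unset.
const agent = proxy ? new HttpsProxyAgent(proxy) : undefined;

https.get('https://api.openai.com/v1/models', { agent }, (res) => {
  console.log('status:', res.statusCode);
});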

View file

@@ -1,28 +1,32 @@
const express = require('express');
const dbConnect = require('../models/dbConnect');
const path = require('path');
const cors = require('cors');
const routes = require('./routes');
const app = express();
const port = process.env.PORT || 3080;
const projectPath = path.join(__dirname, '..', '..', 'client');
dbConnect().then(() => console.log('Connected to MongoDB'));
app.use(cors());
app.use(express.json());
app.use(express.static(path.join(projectPath, 'public')));
app.get('/', function (req, res) {
console.log(path.join(projectPath, 'public', 'index.html'));
res.sendFile(path.join(projectPath, 'public', 'index.html'));
});
app.use('/api/ask', routes.ask);
app.use('/api/messages', routes.messages);
app.use('/api/convos', routes.convos);
app.use('/api/customGpts', routes.customGpts);
app.use('/api/prompts', routes.prompts);
app.listen(port, () => {
console.log(`Server listening at http://localhost:${port}`);
});
const express = require('express');
const dbConnect = require('../models/dbConnect');
const path = require('path');
const cors = require('cors');
const routes = require('./routes');
const app = express();
const port = process.env.PORT || 3080;
const host = process.env.HOST || 'localhost';
const projectPath = path.join(__dirname, '..', '..', 'client');
dbConnect().then(() => console.log('Connected to MongoDB'));
app.use(cors());
app.use(express.json());
app.use(express.static(path.join(projectPath, 'public')));
app.get('/', function (req, res) {
console.log(path.join(projectPath, 'public', 'index.html'));
res.sendFile(path.join(projectPath, 'public', 'index.html'));
});
app.use('/api/ask', routes.ask);
app.use('/api/messages', routes.messages);
app.use('/api/convos', routes.convos);
app.use('/api/customGpts', routes.customGpts);
app.use('/api/prompts', routes.prompts);
app.listen(port, host, () => {
if (host == '0.0.0.0')
console.log(`Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`);
else
console.log(`Server listening at http://${host}:${port}`);
});
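
Taken together, the bind address and proxy can now be controlled entirely from the environment, for example (values illustrative):

HOST=0.0.0.0 PORT=3080 PROXY="http://127.0.0.1:7890" npm start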