diff --git a/ChatQnA/docker/gaudi/README.md b/ChatQnA/docker/gaudi/README.md
index f32d121308..91df664fd1 100644
--- a/ChatQnA/docker/gaudi/README.md
+++ b/ChatQnA/docker/gaudi/README.md
@@ -124,9 +124,7 @@ Build frontend Docker image that enables Conversational experience with ChatQnA
 
 ```bash
 cd GenAIExamples/ChatQnA/docker/ui/
-export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna"
-export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
-docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy --build-arg BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT --build-arg DATAPREP_SERVICE_ENDPOINT=$DATAPREP_SERVICE_ENDPOINT -f ./docker/Dockerfile.react .
+docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile.react .
 cd ../../../..
 ```
 
@@ -426,9 +424,8 @@ chaqna-gaudi-conversation-ui-server:
   image: opea/chatqna-conversation-ui:latest
   container_name: chatqna-gaudi-conversation-ui-server
   environment:
-    - no_proxy=${no_proxy}
-    - https_proxy=${https_proxy}
-    - http_proxy=${http_proxy}
+    - APP_BACKEND_SERVICE_ENDPOINT=${BACKEND_SERVICE_ENDPOINT}
+    - APP_DATA_PREP_SERVICE_URL=${DATAPREP_SERVICE_ENDPOINT}
   ports:
     - "5174:80"
   depends_on:
diff --git a/ChatQnA/docker/gpu/README.md b/ChatQnA/docker/gpu/README.md
index a5130b5e5c..41c2aef329 100644
--- a/ChatQnA/docker/gpu/README.md
+++ b/ChatQnA/docker/gpu/README.md
@@ -64,6 +64,16 @@ docker build --no-cache -t opea/chatqna-ui:latest --build-arg https_proxy=$https
 cd ../../../..
 ```
 
+### 9. Build React UI Docker Image (Optional)
+
+Construct the frontend Docker image using the command below:
+
+```bash
+cd GenAIExamples/ChatQnA/docker/ui/
+docker build --no-cache -t opea/chatqna-react-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile.react .
+cd ../../../..
+```
+
-Then run the command `docker images`, you will have the following 7 Docker Images:
+Then run the command `docker images`, you will have the following 8 Docker Images:
 
 1. `opea/embedding-tei:latest`
@@ -73,6 +83,7 @@ Then run the command `docker images`, you will have the following 7 Docker Image
 5. `opea/dataprep-redis:latest`
 6. `opea/chatqna:latest`
 7. `opea/chatqna-ui:latest`
+8. `opea/chatqna-react-ui:latest`
 
 ## 🚀 Start MicroServices and MegaService
@@ -255,4 +266,33 @@ To access the frontend, open the following URL in your browser: http://{host_ip}
       - "80:5173"
 ```
 
+## 🚀 Launch the Conversational UI (Optional)
+
+To access the Conversational UI (react based) frontend, modify the UI service in the `compose.yaml` file. Replace `chaqna-xeon-ui-server` service with the `chatqna-xeon-conversation-ui-server` service as per the config below:
+
+```yaml
+chaqna-xeon-conversation-ui-server:
+  image: opea/chatqna-conversation-ui:latest
+  container_name: chatqna-xeon-conversation-ui-server
+  environment:
+    - APP_BACKEND_SERVICE_ENDPOINT=${BACKEND_SERVICE_ENDPOINT}
+    - APP_DATA_PREP_SERVICE_URL=${DATAPREP_SERVICE_ENDPOINT}
+  ports:
+    - "5174:80"
+  depends_on:
+    - chaqna-xeon-backend-server
+  ipc: host
+  restart: always
+```
+
+Once the services are up, open the following URL in your browser: http://{host_ip}:5174. By default, the UI runs on port 80 internally. If you prefer to use a different host port to access the frontend, you can modify the port mapping in the `compose.yaml` file as shown below:
+
+```yaml
+  chaqna-xeon-conversation-ui-server:
+    image: opea/chatqna-conversation-ui:latest
+    ...
+    ports:
+      - "80:80"
+```
+
 ![project-screenshot](../../assets/img/chat_ui_init.png)
diff --git a/ChatQnA/docker/ui/docker/Dockerfile.react b/ChatQnA/docker/ui/docker/Dockerfile.react
index 8ec70c6577..f023b7afbc 100644
--- a/ChatQnA/docker/ui/docker/Dockerfile.react
+++ b/ChatQnA/docker/ui/docker/Dockerfile.react
@@ -4,23 +4,18 @@
 # Use node 20.11.1 as the base image
 FROM node:20.11.1 as vite-app
 
-COPY . /usr/app
+COPY ./react /usr/app/react
 
 WORKDIR /usr/app/react
 
-ARG BACKEND_SERVICE_ENDPOINT
-ARG DATAPREP_SERVICE_ENDPOINT
-ENV VITE_BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT
-ENV VITE_DATA_PREP_SERVICE_URL=$DATAPREP_SERVICE_ENDPOINT
 
 RUN ["npm", "install"]
 RUN ["npm", "run", "build"]
 
 FROM nginx:alpine
-EXPOSE 80
-
-COPY --from=vite-app /usr/app/react/nginx.conf /etc/nginx/conf.d/default.conf
 COPY --from=vite-app /usr/app/react/dist /usr/share/nginx/html
+COPY ./react/env.sh /docker-entrypoint.d/env.sh
 
-ENTRYPOINT ["nginx", "-g", "daemon off;"]
\ No newline at end of file
+COPY ./react/nginx.conf /etc/nginx/conf.d/default.conf
+RUN chmod +x /docker-entrypoint.d/env.sh
\ No newline at end of file
diff --git a/ChatQnA/docker/ui/react/.env.production b/ChatQnA/docker/ui/react/.env.production
new file mode 100644
index 0000000000..a46e1e3850
--- /dev/null
+++ b/ChatQnA/docker/ui/react/.env.production
@@ -0,0 +1,2 @@
+VITE_BACKEND_SERVICE_ENDPOINT=APP_BACKEND_SERVICE_ENDPOINT
+VITE_DATA_PREP_SERVICE_URL=APP_DATA_PREP_SERVICE_URL
\ No newline at end of file
diff --git a/ChatQnA/docker/ui/react/env.sh b/ChatQnA/docker/ui/react/env.sh
new file mode 100644
index 0000000000..ce1372ea68
--- /dev/null
+++ b/ChatQnA/docker/ui/react/env.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+for i in $(env | grep APP_) # Substitute every APP_-prefixed variable; if your .env.production placeholders use a different prefix, update this grep pattern to match.
+do
+  key=$(echo $i | cut -d '=' -f 1)
+  value=$(echo $i | cut -d '=' -f 2-)
+  echo $key=$value
+  # To substitute in all files:
+  # find /usr/share/nginx/html -type f -exec sed -i "s|${key}|${value}|g" '{}' +
+
+  # Substitute in JS and CSS files only:
+  find /usr/share/nginx/html -type f \( -name '*.js' -o -name '*.css' \) -exec sed -i "s|${key}|${value}|g" '{}' +
+done
diff --git a/ChatQnA/docker/xeon/README.md b/ChatQnA/docker/xeon/README.md
index e247cedbdc..b607e8532e 100644
--- a/ChatQnA/docker/xeon/README.md
+++ b/ChatQnA/docker/xeon/README.md
@@ -145,10 +145,7 @@ Build frontend Docker image that enables Conversational experience with ChatQnA
 
 ```bash
 cd GenAIExamples/ChatQnA/docker/ui/
-export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna"
-export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
-export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get_file"
-docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy --build-arg BACKEND_SERVICE_ENDPOINT=$BACKEND_SERVICE_ENDPOINT --build-arg DATAPREP_SERVICE_ENDPOINT=$DATAPREP_SERVICE_ENDPOINT --build-arg DATAPREP_GET_FILE_ENDPOINT=$DATAPREP_GET_FILE_ENDPOINT -f ./docker/Dockerfile.react .
+docker build --no-cache -t opea/chatqna-conversation-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile.react .
 cd ../../../..
 ```
 
@@ -396,20 +393,19 @@ To access the frontend, open the following URL in your browser: http://{host_ip}
 
 ## 🚀 Launch the Conversational UI (Optional)
 
-To access the Conversational UI (react based) frontend, modify the UI service in the `compose.yaml` file. Replace `chaqna-gaudi-ui-server` service with the `chatqna-gaudi-conversation-ui-server` service as per the config below:
+To access the Conversational UI (react based) frontend, modify the UI service in the `compose.yaml` file. Replace `chaqna-xeon-ui-server` service with the `chatqna-xeon-conversation-ui-server` service as per the config below:
 
 ```yaml
-chaqna-gaudi-conversation-ui-server:
+chaqna-xeon-conversation-ui-server:
   image: opea/chatqna-conversation-ui:latest
-  container_name: chatqna-gaudi-conversation-ui-server
+  container_name: chatqna-xeon-conversation-ui-server
   environment:
-    - no_proxy=${no_proxy}
-    - https_proxy=${https_proxy}
-    - http_proxy=${http_proxy}
+    - APP_BACKEND_SERVICE_ENDPOINT=${BACKEND_SERVICE_ENDPOINT}
+    - APP_DATA_PREP_SERVICE_URL=${DATAPREP_SERVICE_ENDPOINT}
   ports:
     - "5174:80"
   depends_on:
-    - chaqna-gaudi-backend-server
+    - chaqna-xeon-backend-server
   ipc: host
   restart: always
 ```
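
Usage note: a minimal sketch of how the new runtime configuration can be exercised, assuming the UI image has been built from the updated `Dockerfile.react` and reusing the endpoint values that the removed build args carried (the container name and `host_ip` are placeholders; adjust them to your deployment). The official `nginx:alpine` entrypoint runs executable `*.sh` scripts in `/docker-entrypoint.d/`, so `env.sh` rewrites the `APP_*` placeholders baked in by `.env.production` before nginx serves the compiled assets.

```bash
# Assumes host_ip is exported, as elsewhere in these READMEs.
docker run -d --name chatqna-conversation-ui -p 5174:80 \
  -e APP_BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/chatqna" \
  -e APP_DATA_PREP_SERVICE_URL="http://${host_ip}:6007/v1/dataprep" \
  opea/chatqna-conversation-ui:latest

# Check that env.sh substituted the placeholders in the built JS/CSS bundle:
docker exec chatqna-conversation-ui \
  sh -c 'grep -rl "v1/chatqna" /usr/share/nginx/html || echo "placeholders not substituted"'
```

Because the substitution happens at container start rather than at build time, the same image can be pointed at different backend and dataprep endpoints without rebuilding, which is what the `APP_BACKEND_SERVICE_ENDPOINT` and `APP_DATA_PREP_SERVICE_URL` entries in the updated `compose.yaml` snippets rely on.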