Commit d1b5113

Integrate CodeGen set_env into the UT scripts.
Add a README.md for the CodeGen UT scripts. Optimize the test check. Signed-off-by: ZePan110 <[email protected]>
1 parent c2e9a25 commit d1b5113

7 files changed: +50 −94 lines


CodeGen/docker_compose/amd/gpu/rocm/set_env.sh

+3 −3

@@ -5,8 +5,8 @@
 # SPDX-License-Identifier: Apache-2.0

 ### The IP address or domain name of the server on which the application is running
-export HOST_IP=''
-export EXTERNAL_HOST_IP=''
+export HOST_IP=${ip_address}
+export EXTERNAL_HOST_IP=${ip_address}

 ### The port of the TGI service. On this port, the TGI service will accept connections
 export CODEGEN_TGI_SERVICE_PORT=8028
@@ -36,4 +36,4 @@ export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND
 export CODEGEN_LLM_SERVICE_HOST_IP=${HOST_IP}

 ### The CodeGen service UI port
-export CODEGEN_UI_SERVICE_PORT=18151
+export CODEGEN_UI_SERVICE_PORT=5173
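With HOST_IP and EXTERNAL_HOST_IP now derived from ${ip_address}, the env file depends on that variable being defined in the shell that sources it. A minimal sketch of the calling pattern, mirroring how the UT scripts obtain the address (the exact invocation below is illustrative, not part of this commit):

```bash
# The UT scripts compute ip_address before sourcing; if it were left unset,
# HOST_IP and EXTERNAL_HOST_IP would expand to empty strings again.
ip_address=$(hostname -I | awk '{print $1}')

cd CodeGen/docker_compose/amd/gpu/rocm
source set_env.sh
echo "HOST_IP=${HOST_IP}, UI port=${CODEGEN_UI_SERVICE_PORT}"
```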

CodeGen/docker_compose/amd/gpu/rocm/set_env_vllm.sh

+4 −4

@@ -5,8 +5,8 @@
 # SPDX-License-Identifier: Apache-2.0

 ### The IP address or domain name of the server on which the application is running
-export HOST_IP=''
-export EXTERNAL_HOST_IP=''
+export HOST_IP=${ip_address}
+export EXTERNAL_HOST_IP=${ip_address}

 ### The port of the vLLM service. On this port, the TGI service will accept connections
 export CODEGEN_VLLM_SERVICE_PORT=8028
@@ -25,7 +25,7 @@ export CODEGEN_LLM_SERVICE_PORT=9000
 export CODEGEN_MEGA_SERVICE_HOST_IP=${HOST_IP}

 ### The port for CodeGen backend service
-export CODEGEN_BACKEND_SERVICE_PORT=18150
+export CODEGEN_BACKEND_SERVICE_PORT=7778

 ### The URL of CodeGen backend service, used by the frontend service
 export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen"
@@ -34,4 +34,4 @@ export CODEGEN_BACKEND_SERVICE_URL="http://${EXTERNAL_HOST_IP}:${CODEGEN_BACKEND
 export CODEGEN_LLM_SERVICE_HOST_IP=${HOST_IP}

 ### The CodeGen service UI port
-export CODEGEN_UI_SERVICE_PORT=18151
+export CODEGEN_UI_SERVICE_PORT=5173
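Moving CODEGEN_BACKEND_SERVICE_PORT to 7778 makes the composed CODEGEN_BACKEND_SERVICE_URL line up with the endpoint the test scripts curl. A hedged manual smoke check, assuming the vLLM stack is already up and set_env_vllm.sh has been sourced with ip_address set as above:

```bash
# Manual sanity check against the backend; the payload mirrors the one
# used by the UT scripts' megaservice validation.
curl -s "${CODEGEN_BACKEND_SERVICE_URL}" \
  -H "Content-Type: application/json" \
  -d '{"messages": "def print_hello_world():", "max_tokens": 256}'
```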

CodeGen/tests/README.md

+33 −0

@@ -0,0 +1,33 @@
+# CodeGen E2E test scripts
+
+## Set the required environment variable
+
+```bash
+export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+```
+
+## Run test
+
+On Intel Xeon with TGI:
+
+```bash
+bash test_compose_on_xeon.sh
+```
+
+On Intel Gaudi with TGI:
+
+```bash
+bash test_compose_on_gaudi.sh
+```
+
+On AMD ROCm with TGI:
+
+```bash
+bash test_compose_on_rocm.sh
+```
+
+On AMD ROCm with vLLM:
+
+```bash
+bash test_compose_vllm_on_rocm.sh
+```
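The test scripts re-export HUGGINGFACEHUB_API_TOKEN from the environment, so for a one-off run the token can also be supplied inline instead of exported; a small sketch (the token value is a placeholder):

```bash
# Scoped to a single invocation; assumes the script reads the token from its environment.
HUGGINGFACEHUB_API_TOKEN="hf_xxx" bash test_compose_on_xeon.sh
```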

CodeGen/tests/test_compose_on_gaudi.sh

+4 −33

@@ -10,21 +10,11 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
 export MODEL_CACHE=${model_cache:-"./data"}
-export REDIS_DB_PORT=6379
-export REDIS_INSIGHTS_PORT=8001
-export REDIS_RETRIEVER_PORT=7000
-export EMBEDDER_PORT=6000
-export TEI_EMBEDDER_PORT=8090
-export DATAPREP_REDIS_PORT=6007

 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')

-export http_proxy=${http_proxy}
-export https_proxy=${https_proxy}
-export no_proxy=${no_proxy},${ip_address}
-
 function build_docker_images() {
 opea_branch=${opea_branch:-"main"}
 # If the opea_branch isn't main, replace the git clone branch in Dockerfile.
@@ -58,29 +48,10 @@ function start_services() {
 local compose_profile="$1"
 local llm_container_name="$2"

-cd $WORKPATH/docker_compose/intel/hpu/gaudi
-
-export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
-export LLM_ENDPOINT="http://${ip_address}:8028"
+cd $WORKPATH/docker_compose
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-export MEGA_SERVICE_PORT=7778
-export MEGA_SERVICE_HOST_IP=${ip_address}
-export LLM_SERVICE_HOST_IP=${ip_address}
-export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:${MEGA_SERVICE_PORT}/v1/codegen"
-export NUM_CARDS=1
-export host_ip=${ip_address}
-
-export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}"
-export RETRIEVAL_SERVICE_HOST_IP=${host_ip}
-export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS"
-export INDEX_NAME="CodeGen"
-
-export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
-export TEI_EMBEDDING_HOST_IP=${host_ip}
-export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}"
-export DATAPREP_ENDPOINT="http://${host_ip}:${DATAPREP_REDIS_PORT}/v1/dataprep"
-
-export INDEX_NAME="CodeGen"
+source set_env.sh
+cd intel/hpu/gaudi

 # Start Docker Containers
 docker compose --profile ${compose_profile} up -d | tee ${LOG_PATH}/start_services_with_compose.log
@@ -176,7 +147,7 @@ function validate_megaservice() {
 # Curl the Mega Service with index_name and agents_flag
 validate_services \
 "${ip_address}:7778/v1/codegen" \
-"" \
+"fingerprint" \
 "mega-codegen" \
 "codegen-gaudi-backend-server" \
 '{ "index_name": "test_redis", "agents_flag": "True", "messages": "def print_hello_world():", "max_tokens": 256}'

CodeGen/tests/test_compose_on_rocm.sh

+1 −12

@@ -41,18 +41,7 @@ function build_docker_images() {

 function start_services() {
 cd $WORKPATH/docker_compose/amd/gpu/rocm/
-
-export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
-export CODEGEN_TGI_SERVICE_PORT=8028
-export CODEGEN_TGI_LLM_ENDPOINT="http://${ip_address}:${CODEGEN_TGI_SERVICE_PORT}"
-export CODEGEN_LLM_SERVICE_PORT=9000
-export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-export CODEGEN_MEGA_SERVICE_HOST_IP=${ip_address}
-export CODEGEN_LLM_SERVICE_HOST_IP=${ip_address}
-export CODEGEN_BACKEND_SERVICE_PORT=7778
-export CODEGEN_BACKEND_SERVICE_URL="http://${ip_address}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen"
-export CODEGEN_UI_SERVICE_PORT=5173
-export HOST_IP=${ip_address}
+source set_env.sh

 sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env

CodeGen/tests/test_compose_on_xeon.sh

+4 −30

@@ -10,21 +10,11 @@ echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
 export REGISTRY=${IMAGE_REPO}
 export TAG=${IMAGE_TAG}
 export MODEL_CACHE=${model_cache:-"./data"}
-export REDIS_DB_PORT=6379
-export REDIS_INSIGHTS_PORT=8001
-export REDIS_RETRIEVER_PORT=7000
-export EMBEDDER_PORT=6000
-export TEI_EMBEDDER_PORT=8090
-export DATAPREP_REDIS_PORT=6007

 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
 ip_address=$(hostname -I | awk '{print $1}')

-export http_proxy=${http_proxy}
-export https_proxy=${https_proxy}
-export no_proxy=${no_proxy},${ip_address}
-
 function build_docker_images() {
 opea_branch=${opea_branch:-"main"}
 # If the opea_branch isn't main, replace the git clone branch in Dockerfile.
@@ -60,26 +50,10 @@ function start_services() {
 local compose_profile="$1"
 local llm_container_name="$2"

-cd $WORKPATH/docker_compose/intel/cpu/xeon/
-
-export LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
-export LLM_ENDPOINT="http://${ip_address}:8028"
+cd $WORKPATH/docker_compose
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-export MEGA_SERVICE_PORT=7778
-export MEGA_SERVICE_HOST_IP=${ip_address}
-export LLM_SERVICE_HOST_IP=${ip_address}
-export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:${MEGA_SERVICE_PORT}/v1/codegen"
-export host_ip=${ip_address}
-
-export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}"
-export RETRIEVAL_SERVICE_HOST_IP=${host_ip}
-export RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS"
-export INDEX_NAME="CodeGen"
-
-export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
-export TEI_EMBEDDING_HOST_IP=${host_ip}
-export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}"
-export DATAPREP_ENDPOINT="http://${host_ip}:${DATAPREP_REDIS_PORT}/v1/dataprep"
+source set_env.sh
+cd intel/cpu/xeon/

 # Start Docker Containers
 docker compose --profile ${compose_profile} up -d > ${LOG_PATH}/start_services_with_compose.log
@@ -175,7 +149,7 @@ function validate_megaservice() {
 # Curl the Mega Service with index_name and agents_flag
 validate_services \
 "${ip_address}:7778/v1/codegen" \
-"" \
+"fingerprint" \
 "mega-codegen" \
 "codegen-xeon-backend-server" \
 '{ "index_name": "test_redis", "agents_flag": "True", "messages": "def print_hello_world():", "max_tokens": 256}'

CodeGen/tests/test_compose_vllm_on_rocm.sh

+1 −12

@@ -40,18 +40,7 @@ function build_docker_images() {

 function start_services() {
 cd $WORKPATH/docker_compose/amd/gpu/rocm/
-
-export CODEGEN_LLM_MODEL_ID="Qwen/Qwen2.5-Coder-7B-Instruct"
-export CODEGEN_VLLM_SERVICE_PORT=8028
-export CODEGEN_VLLM_ENDPOINT="http://${ip_address}:${CODEGEN_VLLM_SERVICE_PORT}"
-export CODEGEN_LLM_SERVICE_PORT=9000
-export CODEGEN_HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-export CODEGEN_MEGA_SERVICE_HOST_IP=${ip_address}
-export CODEGEN_LLM_SERVICE_HOST_IP=${ip_address}
-export CODEGEN_BACKEND_SERVICE_PORT=7778
-export CODEGEN_BACKEND_SERVICE_URL="http://${ip_address}:${CODEGEN_BACKEND_SERVICE_PORT}/v1/codegen"
-export CODEGEN_UI_SERVICE_PORT=5173
-export HOST_IP=${ip_address}
+source set_env.sh

 sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
