@@ -73,130 +73,113 @@ function start_services() {
73
73
done
74
74
}
75
75
76
function validate_services() {
    # Validate one HTTP microservice endpoint.
    #
    # Arguments:
    #   $1 - URL to POST to
    #   $2 - pattern (grep) expected in the response body
    #   $3 - short service name, used for the per-service log file
    #   $4 - docker container name whose logs are dumped on failure
    #   $5 - JSON request payload
    #
    # On any failure the container logs are appended to the service log
    # and the whole test script exits non-zero.
    local URL="$1"
    local EXPECTED_RESULT="$2"
    local SERVICE_NAME="$3"
    local DOCKER_NAME="$4"
    local INPUT_DATA="$5"

    # Issue the request exactly once: the response body goes to the log file
    # and the status code is captured via -w. (Curling twice — once for the
    # status and once for the content — could observe two different service
    # states and doubles the load on the service under test.)
    local HTTP_STATUS
    HTTP_STATUS=$(curl -s -o "${LOG_PATH}/${SERVICE_NAME}.log" -w "%{http_code}" \
        -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")

    if [ "$HTTP_STATUS" -ne 200 ]; then
        echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
        docker logs "${DOCKER_NAME}" >> "${LOG_PATH}/${SERVICE_NAME}.log"
        exit 1
    fi

    echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
    local CONTENT
    CONTENT=$(cat "${LOG_PATH}/${SERVICE_NAME}.log")

    if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
        echo "[ $SERVICE_NAME ] Content is as expected."
    else
        echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
        docker logs "${DOCKER_NAME}" >> "${LOG_PATH}/${SERVICE_NAME}.log"
        exit 1
    fi
    sleep 1s
}
129
103
130
function validate_microservices() {
    # Exercise every microservice in the ChatQnA pipeline, one by one.
    # Each call POSTs a sample payload and greps the response for a pattern
    # that indicates a healthy service; any failure aborts the script.

    # TEI embedding backend: a successful embed returns a nested float list.
    validate_services \
        "${ip_address}:8090/embed" \
        "\[\[" \
        "tei-embedding" \
        "tei-embedding-gaudi-server" \
        '{"inputs":"What is Deep Learning?"}'

    # Embedding wrapper microservice: echoes the text plus an embedding array.
    validate_services \
        "${ip_address}:6000/v1/embeddings" \
        '"text":"What is Deep Learning?","embedding":\[' \
        "embedding" \
        "embedding-tei-server" \
        '{"text":"What is Deep Learning?"}'

    sleep 1m # retrieval can't curl as expected, try to wait for more time

    # Retrieval microservice: build a random 768-dim query embedding and
    # only require a 200 response (any body content is accepted).
    test_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)")
    validate_services \
        "${ip_address}:7000/v1/retrieval" \
        " " \
        "retrieval" \
        "retriever-redis-server" \
        "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}"

    # TEI reranking backend: response carries scored indices.
    validate_services \
        "${ip_address}:8808/rerank" \
        '{"index":1,"score":' \
        "tei-rerank" \
        "tei-reranking-gaudi-server" \
        '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}'

    # Reranking wrapper microservice: best document should be echoed back.
    validate_services \
        "${ip_address}:8000/v1/reranking" \
        "Deep learning is..." \
        "rerank" \
        "reranking-tei-gaudi-server" \
        '{"initial_query":"What is Deep Learning?", "retrieved_docs": [{"text":"Deep Learning is not..."}, {"text":"Deep learning is..."}]}'

    # TGI generation backend.
    validate_services \
        "${ip_address}:8008/generate" \
        "generated_text" \
        "tgi-llm" \
        "tgi-gaudi-server" \
        '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}'

    # LLM wrapper microservice: streams server-sent events ("data: " lines).
    validate_services \
        "${ip_address}:9000/v1/chat/completions" \
        "data: " \
        "llm" \
        "llm-tgi-gaudi-server" \
        '{"query":"What is Deep Learning?"}'
}
166
167
167
168
function validate_megaservice() {
    # End-to-end check of the assembled Mega Service: ask a question whose
    # answer is expected to contain the word "billion" and let
    # validate_services handle status/content verification and log capture.
    validate_services \
        "${ip_address}:8888/v1/chatqna" \
        "billion" \
        "mega-chatqna" \
        "chatqna-gaudi-backend-server" \
        '{"messages": "What is the revenue of Nike in 2023?"}'
}
195
178
196
179
function validate_frontend() {
197
180
cd $WORKPATH /docker/ui/svelte
198
181
local conda_env_name=" ChatQnA_e2e"
199
- export PATH=${HOME} /miniconda3 /bin/:$PATH
182
+ export PATH=${HOME} /miniforge3 /bin/:$PATH
200
183
conda remove -n ${conda_env_name} --all -y
201
184
conda create -n ${conda_env_name} python=3.12 -y
202
185
source activate ${conda_env_name}
0 commit comments