t/plugin/ai-proxy-multi2.t

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
use t::APISIX 'no_plan';

log_level("info");
repeat_each(1);
no_long_string();
no_root_location();

my $resp_file = 't/assets/ai-proxy-response.json';
open(my $fh, '<', $resp_file) or die "Could not open file '$resp_file' $!";
my $resp = do { local $/; <$fh> };
close($fh);

print "Hello, World!\n";
print $resp;

add_block_preprocessor(sub {
    my ($block) = @_;

    if (!defined $block->request) {
        $block->set_value("request", "GET /t");
    }

    my $user_yaml_config = <<_EOC_;
plugins:
  - ai-proxy-multi
_EOC_
    $block->set_value("extra_yaml_config", $user_yaml_config);

    my $http_config = $block->http_config // <<_EOC_;
        server {
            server_name openai;
            listen 6724;

            default_type 'application/json';

            # mock OpenAI chat completions endpoint: accepts POST only and
            # requires the query param api_key=apikey
            location /v1/chat/completions {
                content_by_lua_block {
                    local json = require("cjson.safe")

                    if ngx.req.get_method() ~= "POST" then
                        ngx.status = 400
                        ngx.say("Unsupported request method: ", ngx.req.get_method())
                    end

                    ngx.req.read_body()
                    local body, err = ngx.req.get_body_data()
                    body, err = json.decode(body)

                    local query_auth = ngx.req.get_uri_args()["api_key"]
                    if query_auth ~= "apikey" then
                        ngx.status = 401
                        ngx.say("Unauthorized")
                        return
                    end

                    ngx.status = 200
                    ngx.say("passed")
                }
            }

            # mock endpoint used to verify that query params configured in
            # override.endpoint are forwarded to the LLM server
            location /test/params/in/overridden/endpoint {
                content_by_lua_block {
                    local json = require("cjson.safe")
                    local core = require("apisix.core")

                    if ngx.req.get_method() ~= "POST" then
                        ngx.status = 400
                        ngx.say("Unsupported request method: ", ngx.req.get_method())
                    end

                    local query_auth = ngx.req.get_uri_args()["api_key"]
                    ngx.log(ngx.INFO, "found query params: ", core.json.stably_encode(ngx.req.get_uri_args()))

                    if query_auth ~= "apikey" then
                        ngx.status = 401
                        ngx.say("Unauthorized")
                        return
                    end

                    ngx.status = 200
                    ngx.say("passed")
                }
            }
        }
_EOC_

    $block->set_value("http_config", $http_config);
});

run_tests();

__DATA__

=== TEST 1: set route with wrong query param
--- config
    location /t {
        content_by_lua_block {
            local t = require("lib.test_admin").test
            local code, body = t('/apisix/admin/routes/1',
                ngx.HTTP_PUT,
                [[{
                    "uri": "/anything",
                    "plugins": {
                        "ai-proxy-multi": {
                            "instances": [{
                                "name": "openai-official",
                                "provider": "openai",
                                "weight": 1,
                                "auth": {
                                    "query": {"api_key": "wrong_key"}
                                },
                                "options": {
                                    "model": "gpt-35-turbo-instruct",
                                    "max_tokens": 512,
                                    "temperature": 1.0
                                },
                                "override": {
                                    "endpoint": "http://localhost:6724"
                                }
                            }],
                            "ssl_verify": false
                        }
                    }
                }]]
            )

            if code >= 300 then
                ngx.status = code
            end
            ngx.say(body)
        }
    }
--- response_body
passed



=== TEST 2: send request
--- request
POST /anything
{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { "role": "user", "content": "What is 1+1?"} ] }
--- error_code: 401
--- response_body
Unauthorized



=== TEST 3: set route with right query param
--- config
    location /t {
        content_by_lua_block {
            local t = require("lib.test_admin").test
            local code, body = t('/apisix/admin/routes/1',
                ngx.HTTP_PUT,
                [[{
                    "uri": "/anything",
                    "plugins": {
                        "ai-proxy-multi": {
                            "instances": [{
                                "name": "openai-official",
                                "provider": "openai",
                                "weight": 1,
                                "auth": {
                                    "query": {"api_key": "apikey"}
                                },
                                "options": {
                                    "model": "gpt-35-turbo-instruct",
                                    "max_tokens": 512,
                                    "temperature": 1.0
                                },
                                "override": {
                                    "endpoint": "http://localhost:6724"
                                }
                            }],
                            "ssl_verify": false
                        }
                    }
                }]]
            )

            if code >= 300 then
                ngx.status = code
            end
            ngx.say(body)
        }
    }
--- response_body
passed



=== TEST 4: send request
--- request
POST /anything
{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { "role": "user", "content": "What is 1+1?"} ] }
--- error_code: 200
--- response_body
passed



=== TEST 5: set route without overriding the endpoint_url
--- config
    location /t {
        content_by_lua_block {
            local t = require("lib.test_admin").test
            local code, body = t('/apisix/admin/routes/1',
                ngx.HTTP_PUT,
                [[{
                    "uri": "/anything",
                    "plugins": {
                        "ai-proxy-multi": {
                            "instances": [{
                                "name": "openai-official",
                                "provider": "openai",
                                "weight": 1,
                                "auth": {
                                    "header": {"Authorization": "some-key"}
                                },
                                "options": {
                                    "model": "gpt-35-turbo-instruct",
                                    "max_tokens": 512,
                                    "temperature": 1.0
                                }
                            }],
                            "ssl_verify": false
                        }
                    },
                    "upstream": {
                        "type": "roundrobin",
                        "nodes": {
                            "httpbin.org": 1
                        }
                    }
                }]]
            )

            if code >= 300 then
                ngx.status = code
            end
            ngx.say(body)
        }
    }
--- response_body
passed



=== TEST 6: send request
--- custom_trusted_cert: /etc/ssl/certs/ca-certificates.crt
--- request
POST /anything
{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { "role": "user", "content": "What is 1+1?"} ] }
--- error_code: 401



=== TEST 7: query params in override.endpoint should be sent to LLM
--- config
    location /t {
        content_by_lua_block {
            local t = require("lib.test_admin").test
            local code, body = t('/apisix/admin/routes/1',
                ngx.HTTP_PUT,
                [[{
                    "uri": "/anything",
                    "plugins": {
                        "ai-proxy-multi": {
                            "instances": [{
                                "name": "openai-official",
                                "provider": "openai",
                                "weight": 1,
                                "auth": {
                                    "query": {"api_key": "apikey"}
                                },
                                "options": {
                                    "model": "gpt-35-turbo-instruct",
                                    "max_tokens": 512,
                                    "temperature": 1.0
                                },
                                "override": {
                                    "endpoint": "http://localhost:6724/test/params/in/overridden/endpoint?some_query=yes"
                                }
                            }],
                            "ssl_verify": false
                        }
                    }
                }]]
            )

            if code >= 300 then
                ngx.status = code
            end
            ngx.say(body)
        }
    }
--- response_body
passed



=== TEST 8: send request
--- request
POST /anything
{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { "role": "user", "content": "What is 1+1?"} ] }
--- error_code: 200
--- error_log
found query params: {"api_key":"apikey","some_query":"yes"}
--- response_body
passed