in src/zretail_demo/zretail_demo_frontend/src/zcl_product_search_qic.clas.abap
METHOD get_sql_for_attr_search.
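  " Send the caller's prompt to Gemini on Vertex AI (via the ABAP SDK for Google Cloud)
  " and return the generated SQL statement for the product attribute search.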
  DATA lo_client          TYPE REF TO /goog/cl_aiplatform_v1.
  DATA lv_p_projects_id   TYPE string.
  DATA lv_p_locations_id  TYPE string.
  DATA lv_p_publishers_id TYPE string.
  DATA lv_p_models_id     TYPE string.
  DATA ls_input           TYPE /goog/cl_aiplatform_v1=>ty_726.
  DATA ls_output          TYPE /goog/cl_aiplatform_v1=>ty_727.
  DATA lv_ret_code        TYPE i.
  DATA lv_err_text        TYPE string.
  DATA ls_err_resp        TYPE /goog/err_resp.
  DATA lv_msg             TYPE string.
  DATA lo_exception       TYPE REF TO /goog/cx_sdk.
  DATA es_raw             TYPE string.
  TRY.
      " Open HTTP Connection
      lo_client = NEW #( iv_key_name = 'RETAIL_DEMO' ).

      " Populate relevant parameters
      lv_p_projects_id   = lo_client->gv_project_id.
      lv_p_locations_id  = 'us-central1'.
      lv_p_publishers_id = 'google'.
      lv_p_models_id     = 'gemini-1.5-pro-preview-0514'.

      " Set the Model Parameters and Prompt
      ls_input = VALUE #( generation_config = VALUE #( max_output_tokens = 8000
                                                       temperature       = '1'
                                                       top_p             = '0.8'
                                                       top_k             = '40' )
                          contents          = VALUE #( ( role  = 'user'
                                                         parts = VALUE #( ( text = iv_prompt ) ) ) ) ).
" Call Gemini Pro to identify sentiments.
lo_client->generate_content_models( EXPORTING iv_p_projects_id = lv_p_projects_id
iv_p_locations_id = lv_p_locations_id
iv_p_publishers_id = lv_p_publishers_id
iv_p_models_id = lv_p_models_id
is_input = ls_input
IMPORTING es_raw = es_raw
es_output = ls_output
ev_ret_code = lv_ret_code
ev_err_text = lv_err_text
es_err_resp = ls_err_resp ).
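      " Read the generated SQL from the model response (only the first candidate and its first part are used)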
      IF lo_client->is_success( lv_ret_code ) = abap_true.
        LOOP AT ls_output-candidates INTO DATA(ls_candidate).
          LOOP AT ls_candidate-content-parts INTO DATA(ls_part).
            rv_query = ls_part-text.
            " Remove Markdown code-fence markers and other formatting artifacts from the generated text
            REPLACE ALL OCCURRENCES OF 'sql' IN rv_query WITH space.
            REPLACE ALL OCCURRENCES OF '`'   IN rv_query WITH space.
            REPLACE ALL OCCURRENCES OF '#'   IN rv_query WITH space.
            EXIT.
          ENDLOOP.
          EXIT.
        ENDLOOP.
      ENDIF.
" Close HTTP Connection
lo_client->close( ).
CATCH /goog/cx_sdk.
ENDTRY.