How to fix INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False
import os
import logging
import openai
from gpt_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
from langchain.llms import AzureOpenAI
from flask_cors import CORS
from flask_swagger_ui import get_swaggerui_blueprint
from flask import Flask, request, jsonify
app = Flask(__name__)
CORS(app)
os.environ["OPENAI_API_TYPE"] = "azure"
os.environ["OPENAI_API_KEY"] = 'Azure-api-key'
os.environ["OPENAI_API_BASE"] = 'https://chat-gpt-dev.openai.azure.com/'
os.environ["OPENAI_API_VERSION"]="2023-03-15-preview"
def load():
max_input_size = 5000
num_outputs = 4096
max_chunk_overlap = 20
chunk_size_limit = 5000
prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
llm_predictor = LLMPredictor(llm=AzureOpenAI(temperature=10, model_name="gpt35", max_tokens=num_outputs,))
documents = SimpleDirectoryReader(r"data").load_data()
service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
index.save_to_disk('index.json')
return jsonify({'status': 'success', 'response': 'Data loaded successfully'})
load()
@app.route("/chatgpt", methods=['GET'])
def chatbot():
index = GPTSimpleVectorIndex.load_from_disk('index.json')
question = request.args.get('question')
if question:
answer = index.query(question, response_mode="compact")
return jsonify({'status': 'success', 'response': str(answer)})
else:
return "No question provided"
if __name__ == "__main__":
app.run(host='0.0.0.0', port=8000, threaded=True, debug=True)
Getting error as below: USING API_BASE: https://chat-gpt-dev.openai.azure.com/ INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False INFO:openai:error_code=404 error_message='Resource not found' error_param=None error_type=None message='OpenAI API error received' stream_error=False Traceback (most recent call last): File "D:\azure_openAi.venv\Lib\site-packages\tenacity_init_.py", line 382, in call result = fn(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\embeddings\openai.py", line 147, in get_embeddings
data = openai.Embedding.create(input=list_of_text, engine=engine).data
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\openai\api_resources\embedding.py", line 33, in create
response = super().create(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 153, in create
response, _, api_key = requestor.request(
^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\openai\api_requestor.py", line 226, in request
resp, got_stream = self._interpret_response(result, stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\openai\api_requestor.py", line 619, in _interpret_response
self._interpret_response_line(
File "D:\azure_openAi.venv\Lib\site-packages\openai\api_requestor.py", line 679, in _interpret_response_line
raise self.handle_error_response(
openai.error.InvalidRequestError: Resource not found
The above exception was the direct cause of the following exception:
Traceback (most recent call last): File "d:\azure_openAi\app.py", line 49, in <module>
load()
File "d:\azure_openAi\app.py", line 45, in load
index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\base.py", line 105, in from_documents
return cls(
^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\vector_indices.py", line 69, in init
super().__init__(
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\base.py", line 54, in init
super().__init__(
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\base.py", line 74, in init
index_struct = self.build_index_from_nodes(nodes)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\token_counter\token_counter.py", line 78, in wrapped_llm_predict
f_return_val = f(_self, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\base.py", line 217, in build_index_from_nodes
return self._build_index_from_nodes(nodes)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\base.py", line 206, in _build_index_from_nodes
self._add_nodes_to_index(index_struct, nodes)
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\base.py", line 178, in _add_nodes_to_index
embedding_results = self._get_node_embedding_results(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\indices\vector_store\base.py", line 96, in _get_node_embedding_results
) = self._service_context.embed_model.get_queued_text_embeddings()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\embeddings\base.py", line 155, in get_queued_text_embeddings
embeddings = self._get_text_embeddings(cur_batch_texts)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\gpt_index\embeddings\openai.py", line 261, in _get_text_embeddings
embeddings = get_embeddings(texts, engine=engine)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\tenacity_init_.py", line 289, in wrapped_f
return self(f, *args, **kw)
^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\tenacity_init_.py", line 379, in call do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\azure_openAi.venv\Lib\site-packages\tenacity_init_.py", line 326, in iter
raise retry_exc from fut.exception()
tenacity.RetryError: RetryError[<Future at 0x248b31bf190 state=finished raised InvalidRequestError>]