Do you need to file an issue?
I have searched the existing issues and this bug is not already filed.
My model is hosted on OpenAI or Azure. If not, please look at the "model providers" issue and don't file a new one here.
I believe this is a legitimate bug, not just a question. If this is a question, please use the Discussions area.
Describe the bug
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Exception in _map_response_single_batch
Traceback (most recent call last):
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\structured_search\global_search\search.py", line 232, in _map_response_single_batch
search_response = await self.llm.agenerate(
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 142, in agenerate
async for attempt in retryer:
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity\asyncio_init.py", line 166, in anext
do = await self.iter(retry_state=self.retry_state)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity\asyncio_init.py", line 153, in iter
result = await action(retry_state)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity_utils.py", line 99, in inner
return call(*args, **kwargs)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity_init.py", line 398, in
self._add_action_func(lambda rs: rs.outcome.result())
File "D:\Anaconda3\envs\demo4\lib\concurrent\futures_base.py", line 451, in result
return self.__get_result()
File "D:\Anaconda3\envs\demo4\lib\concurrent\futures_base.py", line 403, in __get_result
raise self._exception
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 144, in agenerate
return await self._agenerate(
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 268, in _agenerate
response = await self.async_client.chat.completions.create( # type: ignore
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai\resources\chat\completions.py", line 1720, in create
return await self._post(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1849, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1543, in request
return await self._request(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1646, in _request
return await self._process_response(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1743, in _process_response
return await api_response.parse()
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_response.py", line 430, in parse
parsed = self._parse(to=to)
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_response.py", line 265, in parse
data = response.json()
File "D:\Anaconda3\envs\demo4\lib\site-packages\httpx_models.py", line 832, in json
return jsonlib.loads(self.content, **kwargs)
File "D:\Anaconda3\envs\demo4\lib\json_init.py", line 346, in loads
return _default_decoder.decode(s)
File "D:\Anaconda3\envs\demo4\lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "D:\Anaconda3\envs\demo4\lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Exception in _map_response_single_batch
Traceback (most recent call last):
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\structured_search\global_search\search.py", line 232, in _map_response_single_batch
search_response = await self.llm.agenerate(
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 142, in agenerate
async for attempt in retryer:
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity\asyncio_init.py", line 166, in anext
do = await self.iter(retry_state=self.retry_state)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity\asyncio_init.py", line 153, in iter
result = await action(retry_state)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity_utils.py", line 99, in inner
return call(*args, **kwargs)
File "D:\Anaconda3\envs\demo4\lib\site-packages\tenacity_init.py", line 398, in
self._add_action_func(lambda rs: rs.outcome.result())
File "D:\Anaconda3\envs\demo4\lib\concurrent\futures_base.py", line 451, in result
return self.__get_result()
File "D:\Anaconda3\envs\demo4\lib\concurrent\futures_base.py", line 403, in __get_result
raise self._exception
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 144, in agenerate
return await self._agenerate(
File "D:\Anaconda3\envs\demo4\lib\site-packages\graphrag\query\llm\oai\chat_openai.py", line 268, in _agenerate
response = await self.async_client.chat.completions.create( # type: ignore
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai\resources\chat\completions.py", line 1720, in create
return await self._post(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1849, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1543, in request
return await self._request(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1646, in _request
return await self._process_response(
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_base_client.py", line 1743, in _process_response
return await api_response.parse()
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_response.py", line 430, in parse
parsed = self._parse(to=to)
File "D:\Anaconda3\envs\demo4\lib\site-packages\openai_response.py", line 265, in parse
data = response.json()
File "D:\Anaconda3\envs\demo4\lib\site-packages\httpx_models.py", line 832, in json
return jsonlib.loads(self.content, **kwargs)
File "D:\Anaconda3\envs\demo4\lib\json_init.py", line 346, in loads
return _default_decoder.decode(s)
File "D:\Anaconda3\envs\demo4\lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "D:\Anaconda3\envs\demo4\lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
Exception in _map_response_single_batch
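What the trace boils down to: the openai client hands the raw HTTP body to httpx's Response.json(), and json.loads fails at character 0 because the body is empty or not JSON at all (typically an HTML error page from a proxy or gateway, or an endpoint that is not OpenAI-compatible). A minimal sketch for checking what the endpoint actually returns, outside of GraphRAG; the base URL, key, and model below are placeholders, not values taken from this report:

```python
import asyncio
import httpx

# Placeholders: substitute the api_base, api_key, and model from your GraphRAG settings.
API_BASE = "https://api.openai.com/v1"
API_KEY = "sk-..."
MODEL = "gpt-4o-mini"

async def probe() -> None:
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.post(
            f"{API_BASE}/chat/completions",
            headers={"Authorization": f"Bearer {API_KEY}"},
            json={"model": MODEL, "messages": [{"role": "user", "content": "ping"}]},
        )
        # Inspect the raw body before any JSON parsing; if this prints an empty
        # string or an HTML page, that is exactly what raises the JSONDecodeError
        # inside the openai client's response handling.
        print(resp.status_code, resp.headers.get("content-type"))
        print(resp.text[:500])

asyncio.run(probe())
```

If the body printed here is valid JSON, the problem is more likely in how GraphRAG's api_base or proxy settings point at the endpoint than in the endpoint itself.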
Steps to reproduce
No response
Expected Behavior
No response
GraphRAG Config Used
# Paste your config here
Logs and screenshots
No response
Additional Information
GraphRAG Version:
Operating System:
Python Version:
Related Issues:
kroot110 added the bug and triage labels on Feb 9, 2025.