fix refresh bug
MrJs133 committed Dec 27, 2024
1 parent 10d802f commit fddc8e0
Showing 5 changed files with 27 additions and 19 deletions.
4 changes: 2 additions & 2 deletions hugegraph-llm/src/hugegraph_llm/config/prompt_config.py
@@ -205,10 +205,10 @@ class PromptConfig(BasePromptConfig):
The generated gremlin is:
"""

doc_input_text = """Meet Sarah, a 30-year-old attorney, and her roommate, James, whom she's shared a home with since 2010.
doc_input_text: str = """Meet Sarah, a 30-year-old attorney, and her roommate, James, whom she's shared a home with since 2010.
James, in his professional life, works as a journalist. Additionally, Sarah is the proud owner of the website
www.sarahsplace.com, while James manages his own webpage, though the specific URL is not mentioned here.
These two individuals, Sarah and James, have not only forged a strong personal bond as roommates but have also
carved out their distinctive digital presence through their respective webpages, showcasing their varied interests
and experiences.
"""
"""
6 changes: 4 additions & 2 deletions hugegraph-llm/src/hugegraph_llm/demo/rag_demo/app.py
@@ -120,7 +120,7 @@ def init_rag_ui() -> gr.Interface:
textbox_array_graph_config = create_configs_block()

with gr.Tab(label="1. Build RAG Index 💡"):
-textbox_input_schema, textbox_info_extract_template = create_vector_graph_block()
+textbox_input_text, textbox_input_schema, textbox_info_extract_template = create_vector_graph_block()
with gr.Tab(label="2. (Graph)RAG & User Functions 📖"):
(
textbox_inp,
@@ -147,6 +147,7 @@ def refresh_ui_config_prompt() -> tuple:
huge_settings.graph_user,
huge_settings.graph_pwd,
huge_settings.graph_space,
+prompt.doc_input_text,
prompt.graph_schema,
prompt.extract_graph_prompt,
prompt.default_question,
@@ -155,7 +156,7 @@
prompt.custom_rerank_info,
prompt.default_question,
huge_settings.graph_name,
-prompt.gremlin_generate_prompt,
+prompt.gremlin_generate_prompt
)

hugegraph_llm_ui.load( # pylint: disable=E1101
@@ -167,6 +168,7 @@ def refresh_ui_config_prompt() -> tuple:
textbox_array_graph_config[3],
textbox_array_graph_config[4],
textbox_array_graph_config[5],
+textbox_input_text,
textbox_input_schema,
textbox_info_extract_template,
textbox_inp,
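Both edits in this file go together: Gradio's `Blocks.load()` assigns the tuple returned by its `fn` to the components in `outputs` strictly by position, so adding `prompt.doc_input_text` to the tuple returned by `refresh_ui_config_prompt` only works if `textbox_input_text` is inserted at the matching position in the outputs list. A minimal sketch of that pattern, with illustrative component names rather than the app's real ones:

# Minimal Gradio sketch (illustrative names): the values returned by `refresh`
# are written into the `outputs` components by position when the page (re)loads.
import gradio as gr

def refresh() -> tuple:
    # In the real app these values are re-read from the prompt/graph config.
    return "doc input text after refresh", "graph schema after refresh"

with gr.Blocks() as demo:
    doc_text = gr.Textbox(label="Doc input text")
    schema = gr.Textbox(label="Graph schema")
    demo.load(fn=refresh, outputs=[doc_text, schema])  # order must match the returned tuple

if __name__ == "__main__":
    demo.launch()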
@@ -138,4 +138,4 @@ def on_tab_select(input_f, input_t, evt: gr.SelectData):
tab_upload_file.select(fn=on_tab_select, inputs=[input_file, input_text], outputs=[input_file, input_text])
tab_upload_text.select(fn=on_tab_select, inputs=[input_file, input_text], outputs=[input_file, input_text])

-return input_schema, info_extract_template
+return input_text, input_schema, info_extract_template
@@ -25,15 +25,15 @@ class FetchGraphData:
def __init__(self, graph: PyHugeClient):
self.graph = graph

-def run(self, context: Optional[Dict[str, Any]]) -> Dict[str, Any]:
-if context is None:
-context = {}
-if "num_vertices" not in context:
-context["num_vertices"] = self.graph.gremlin().exec("g.V().id().count()")["data"]
-if "num_edges" not in context:
-context["num_edges"] = self.graph.gremlin().exec("g.E().id().count()")["data"]
-if "vertices" not in context:
-context["vertices"] = self.graph.gremlin().exec("g.V().id().limit(10000)")["data"]
-if "edges" not in context:
-context["edges"] = self.graph.gremlin().exec("g.E().id().limit(100)")["data"]
-return context
+def run(self, graph_summary_info: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+if graph_summary_info is None:
+graph_summary_info = {}
+if "num_vertices" not in graph_summary_info:
+graph_summary_info["num_vertices"] = self.graph.gremlin().exec("g.V().id().count()")["data"]
+if "num_edges" not in graph_summary_info:
+graph_summary_info["num_edges"] = self.graph.gremlin().exec("g.E().id().count()")["data"]
+if "vertices" not in graph_summary_info:
+graph_summary_info["vertices"] = self.graph.gremlin().exec("g.V().id().limit(10000)")["data"]
+if "edges" not in graph_summary_info:
+graph_summary_info["edges"] = self.graph.gremlin().exec("g.E().id().limit(100)")["data"]
+return graph_summary_info
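This hunk only renames the `run()` parameter from `context` to `graph_summary_info`; the Gremlin queries themselves are unchanged. For orientation, a usage sketch; it assumes `client` is an already-configured `PyHugeClient` (construction omitted, since constructor arguments vary by client version):

# `client` is assumed to be a configured PyHugeClient instance.
summary = FetchGraphData(client).run(None)   # passing None starts from a fresh dict
print(summary["num_vertices"], summary["num_edges"])
print(len(summary["vertices"]))              # vertex ids, capped at 10000 by the query
print(len(summary["edges"]))                 # edge ids, capped at 100 by the query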
10 changes: 8 additions & 2 deletions hugegraph-llm/src/hugegraph_llm/utils/graph_index_utils.py
@@ -80,8 +80,14 @@ def extract_graph(input_file, input_text, schema, example_prompt) -> str:

try:
context = builder.run()
graph_elements = {"vertices": context["vertices"], "edges": context["edges"]}
return json.dumps(graph_elements, ensure_ascii=False, indent=2)
if not context["vertices"] and not context["edges"]:
log.info("Please check the schema.(The schema may not match the Doc)")
return json.dumps(
{"vertices": context["vertices"], "edges": context["edges"], "warning": "The schema may not match the Doc"},
ensure_ascii=False,
indent=2
)
return json.dumps({"vertices": context["vertices"], "edges": context["edges"]}, ensure_ascii=False, indent=2)
except Exception as e: # pylint: disable=broad-exception-caught
log.error(e)
raise gr.Error(str(e))
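The new branch changes what the UI receives when extraction yields nothing (typically a schema that does not match the document): instead of a bare empty result, the returned JSON now carries an explicit warning. Based on the code above, the empty-extraction return value looks roughly like:

{
  "vertices": [],
  "edges": [],
  "warning": "The schema may not match the Doc"
}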
