Skip to content

Commit

Permalink
fix: prompt & bug (#698)
Browse files Browse the repository at this point in the history
  • Loading branch information
mizy authored Dec 6, 2023
1 parent b0d0fe2 commit b0c3b17
Show file tree
Hide file tree
Showing 14 changed files with 101 additions and 88 deletions.
2 changes: 1 addition & 1 deletion app/pages/Import/AIImport/Create.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ const Create = observer((props: { visible: boolean; onCancel: () => void }) => {

<Form layout="vertical" style={{ display: step == 1 ? 'block' : 'none' }}>
<Form.Item label={intl.get('llm.file')}>
<span>{type === 'file' ? valuse.file : valuse.filePath}</span>
<span>{valuse.file}</span>
</Form.Item>
<Form.Item label={intl.get('llm.url')}>
<Input disabled value={llm.config.url} />
Expand Down
18 changes: 10 additions & 8 deletions app/pages/Import/TaskList/TaskItem/AIImportItem.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ const AIImportItem = observer((props: IProps) => {
const onTaskDelete = () => {
_delete('/api/llm/import/job/' + llmJob.job_id)().then((res) => {
if (res.code === 0) {
antMsg.success(intl.get('import.deleteSuccess'));
antMsg.success(intl.get('common.success'));
props.onRefresh();
}
});
Expand All @@ -69,7 +69,7 @@ const AIImportItem = observer((props: IProps) => {
const onTaskStop = () => {
post('/api/llm/import/job/cancel')({ jobId: llmJob.job_id }).then((res) => {
if (res.code === 0) {
antMsg.success(intl.get('import.stopImportingSuccess'));
antMsg.success(intl.get('common.success'));
props.onRefresh();
}
});
Expand All @@ -78,7 +78,7 @@ const AIImportItem = observer((props: IProps) => {
setRerunLoading(true);
const res = await post('/api/llm/import/job/rerun')({ jobId: llmJob.job_id });
if (res.code === 0) {
antMsg.success(intl.get('common.rerunSuccess'));
antMsg.success(intl.get('common.success'));
props.onRefresh();
}
setRerunLoading(false);
Expand Down Expand Up @@ -172,11 +172,13 @@ const AIImportItem = observer((props: IProps) => {
/>
</div>
<div className={styles.operations}>
<Button className="primaryBtn" onClick={() => onViewLog(props.data)}>
<Tooltip title={intl.get('import.viewLogs')}>
<Icon type="icon-studio-btn-ddl" />
</Tooltip>
</Button>
{llmJob.status !== ILLMStatus.Pending && (
<Button className="primaryBtn" onClick={() => onViewLog(props.data)}>
<Tooltip title={intl.get('import.viewLogs')}>
<Icon type="icon-studio-btn-ddl" />
</Tooltip>
</Button>
)}
{llmJob.status === ILLMStatus.Running && (
<Popconfirm
placement="left"
Expand Down
2 changes: 1 addition & 1 deletion app/pages/Import/TaskList/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ const TaskList = () => {
{intl.get('import.uploadTemp')}
</Button>
)}
{global.appSetting.beta.open && global.appSetting.beta.functions.llmImport.open && (
{global.appSetting.beta.open && global.appSetting.beta.functions?.llmImport.open && (
<Button className="studioAddBtn" onClick={onCreateAIIMport}>
<Icon className="studioAddBtnIcon" type="icon-studio-btn-add" />
{intl.get('llm.aiImport')}
Expand Down
21 changes: 5 additions & 16 deletions app/pages/Import/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import { useI18n } from '@vesoft-inc/i18n';
import DatasourceList from './DatasourceList';
import styles from './index.module.less';
import TaskList from './TaskList';
import llm from '@app/stores/llm';

const Import = () => {
const history = useHistory();
Expand All @@ -15,6 +16,7 @@ const Import = () => {
const { intl } = useI18n();
useEffect(() => {
trackPageView('/import');
llm.fetchConfig();
}, []);
useEffect(() => {
const path = location.pathname;
Expand All @@ -27,28 +29,15 @@ const Import = () => {
return (
<div className={cls(styles.nebuaImportPage, 'studioCenterLayout')}>
<div className="studioTabHeader">
<Radio.Group
className="studioTabGroup"
value={tab}
buttonStyle="solid"
onChange={handleTabChange}
>
<Radio.Group className="studioTabGroup" value={tab} buttonStyle="solid" onChange={handleTabChange}>
<Radio.Button value="tasks">{intl.get('import.importData')}</Radio.Button>
<Radio.Button value="datasources">{intl.get('import.dataSourceManagement')}</Radio.Button>
</Radio.Group>
</div>
<div>
<Route
path={`/import/datasources`}
exact={true}
component={DatasourceList}
/>
<Route path={`/import/datasources`} exact={true} component={DatasourceList} />

<Route
path={`/import/tasks`}
exact={true}
component={TaskList}
/>
<Route path={`/import/tasks`} exact={true} component={TaskList} />
</div>
</div>
);
Expand Down
2 changes: 1 addition & 1 deletion app/pages/MainPage/index.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Suspense } from 'react';
import { Suspense, useEffect } from 'react';
import { Layout, Spin } from 'antd';
import { Redirect, Route, Switch } from 'react-router-dom';
import { shouldAlwaysShowWelcome } from '@app/pages/Welcome';
Expand Down
2 changes: 1 addition & 1 deletion app/pages/Schema/SchemaConfig/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ const SchemaConfig = () => {
const { schema, global } = useStore();
const { spaces, getSpaces, switchSpace, currentSpace } = schema;
const { currentLocale } = useI18n();
const showViewSchemaBetaFunc = global.appSetting.beta.open && global.appSetting.beta.functions.viewSchema.open;
const showViewSchemaBetaFunc = global.appSetting.beta.open && global.appSetting.beta.functions?.viewSchema?.open;
const spaceInUrl = useMemo(() => {
const params = new URLSearchParams(location.search);
return params.get('space');
Expand Down
19 changes: 13 additions & 6 deletions app/stores/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,21 @@ diff
> RETURN p.person.name;
Question:{query_str}
`;
export const llmImportPrompt = `You are knowledge graph Expert.
please extract relationship data from the text below, referring to the schema of the graph, and return the results in the following JSON format without interpreting, don't explain just return the results directly.
export const llmImportPrompt = `As a knowledge graph expert, your task is to extract relationship data from the following text:
----
{text}
----
Please proceed according to the schema of the knowledge graph:
----
{spaceSchema}
----
Return the results directly, without interpretation. The results should be in the following JSON format:
{
"nodes":[{ "name":"","type":"","props":{} }],
"edges":[{ "src":"","dst":"","edgeType":"","props":{} }]
"nodes":[{ "name":string,"type":string,"props":object }],
"edges":[{ "src":string,"dst":string,"edgeType":string,"props":object }]
}
The schema of the graph is: {spaceSchema}
The text is: {text}
The result is:
`;
export const llmImportTask = `please execute the task below, and return the result, don't explain, just return the result directly.
Expand Down
4 changes: 2 additions & 2 deletions server/api/studio/etc/studio-api.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -61,5 +61,5 @@ DB:
MaxIdleConns: 10
LLM:
GQLPath: "./data/llm"
GQLBatchSize: 1
MaxBlockSize: 1
GQLBatchSize: 100
MaxBlockSize: 0
29 changes: 2 additions & 27 deletions server/api/studio/internal/model/llm-job.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package db
import (
"time"

"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/base"
"gorm.io/datatypes"
)

Expand All @@ -15,35 +16,9 @@ type LLMJob struct {
SpaceSchemaString string `json:"space_schema_string"`
File string `json:"file"`
JobType string `json:"job_type"`
Status LLMStatus `json:"status"`
Status base.LLMStatus `json:"status"`
PromptTemplate string `json:"prompt_template"`
Process datatypes.JSON `json:"process"`
CreateTime time.Time `json:"create_time" gorm:"column:create_time;type:datetime;autoCreateTime"`
UpdateTime time.Time `json:"update_ime" gorm:"column:update_time;type:datetime;autoUpdateTime"`
}

type Process struct {
TotalSize int `json:"total"`
CurrentSize int `json:"current"`
Ratio float64 `json:"ratio"`
FailedReason string `json:"failed_reason"`
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
}

type LLMStatus string

const (
LLMStatusRunning LLMStatus = "running"
LLMStatusSuccess LLMStatus = "success"
LLMStatusFailed LLMStatus = "failed"
LLMStatusCancel LLMStatus = "cancel"
LLMStatusPending LLMStatus = "pending"
)

type LLMJobType string

const (
LLMJobTypeFile LLMJobType = "file"
LLMJobTypeFilePath LLMJobType = "file_path"
)
9 changes: 5 additions & 4 deletions server/api/studio/internal/service/llm/import.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
db "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/model"
"github.com/vesoft-inc/nebula-studio/server/api/studio/internal/types"
"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/auth"
"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/base"
"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/llm"
"gorm.io/datatypes"
)
Expand All @@ -35,7 +36,7 @@ func (g *llmService) AddImportJob(req *types.LLMImportRequest) (resp *types.LLMR
Space: req.Space,
File: req.File,
JobType: req.Type,
Status: db.LLMStatusPending,
Status: base.LLMStatusPending,
Host: config.Host,
UserName: config.UserName,
PromptTemplate: req.PromptTemplate,
Expand Down Expand Up @@ -95,11 +96,11 @@ func (g *llmService) HandleLLMImportJob(req *types.HandleLLMImportRequest) (resp
}

if req.Action == "cancel" {
job.Status = db.LLMStatusCancel
llm.ChangeRunningJobStatus(job.JobID, db.LLMStatusCancel)
job.Status = base.LLMStatusCancel
llm.ChangeRunningJobStatus(job.JobID, base.LLMStatusCancel)
}
if req.Action == "rerun" {
job.Status = db.LLMStatusPending
job.Status = base.LLMStatusPending
// datatypes.JSON
job.Process = datatypes.JSON("{}")
//delete log & ngql
Expand Down
26 changes: 26 additions & 0 deletions server/api/studio/pkg/base/types.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,29 @@ const (
Error StatusCode = -1
Success StatusCode = 0
)

type Process struct {
TotalSize int `json:"total"`
CurrentSize int `json:"current"`
Ratio float64 `json:"ratio"`
FailedReason string `json:"failed_reason"`
PromptTokens int `json:"prompt_tokens"`
CompletionTokens int `json:"completion_tokens"`
}

type LLMStatus string

const (
LLMStatusRunning LLMStatus = "running"
LLMStatusSuccess LLMStatus = "success"
LLMStatusFailed LLMStatus = "failed"
LLMStatusCancel LLMStatus = "cancel"
LLMStatusPending LLMStatus = "pending"
)

type LLMJobType string

const (
LLMJobTypeFile LLMJobType = "file"
LLMJobTypeFilePath LLMJobType = "file_path"
)
Loading

0 comments on commit b0c3b17

Please sign in to comment.