Merge branch 'main' into webui-add-bypass

This commit is contained in:
choizhang
2025-04-11 10:52:33 +08:00
5 changed files with 165 additions and 127 deletions

View File

@@ -1 +1 @@
-__api_version__ = "0145"
+__api_version__ = "0146"

File diff suppressed because one or more lines are too long

View File

@@ -8,7 +8,7 @@
     <link rel="icon" type="image/svg+xml" href="logo.png" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Lightrag</title>
-    <script type="module" crossorigin src="/webui/assets/index-BPm_J2w3.js"></script>
+    <script type="module" crossorigin src="/webui/assets/index-DSVCuARS.js"></script>
     <link rel="stylesheet" crossorigin href="/webui/assets/index-CTB4Vp_z.css">
   </head>
   <body>

View File

@@ -245,10 +245,10 @@ export const checkHealth = async (): Promise<
   try {
     const response = await axiosInstance.get('/health')
     return response.data
-  } catch (e) {
+  } catch (error) {
     return {
       status: 'error',
-      message: errorMessage(e)
+      message: errorMessage(error)
     }
   }
 }
@@ -278,65 +278,100 @@ export const queryTextStream = async (
   onChunk: (chunk: string) => void,
   onError?: (error: string) => void
 ) => {
+  const apiKey = useSettingsStore.getState().apiKey;
+  const token = localStorage.getItem('LIGHTRAG-API-TOKEN');
+  const headers: HeadersInit = {
+    'Content-Type': 'application/json',
+    'Accept': 'application/x-ndjson',
+  };
+  if (token) {
+    headers['Authorization'] = `Bearer ${token}`;
+  }
+  if (apiKey) {
+    headers['X-API-Key'] = apiKey;
+  }
   try {
-    let buffer = ''
-    await axiosInstance
-      .post('/query/stream', request, {
-        responseType: 'text',
-        headers: {
-          Accept: 'application/x-ndjson'
-        },
-        transformResponse: [
-          (data: string) => {
-            // Accumulate the data and process complete lines
-            buffer += data
-            const lines = buffer.split('\n')
-            // Keep the last potentially incomplete line in the buffer
-            buffer = lines.pop() || ''
-            for (const line of lines) {
-              if (line.trim()) {
-                try {
-                  const parsed = JSON.parse(line)
-                  if (parsed.response) {
-                    onChunk(parsed.response)
-                  } else if (parsed.error && onError) {
-                    onError(parsed.error)
-                  }
-                } catch (e) {
-                  console.error('Error parsing stream chunk:', e)
-                  if (onError) onError('Error parsing server response')
-                }
-              }
-            }
-            return data
-          }
-        ]
-      })
-      .catch((error) => {
-        if (onError) onError(errorMessage(error))
-      })
-    // Process any remaining data in the buffer
-    if (buffer.trim()) {
-      try {
-        const parsed = JSON.parse(buffer)
-        if (parsed.response) {
-          onChunk(parsed.response)
-        } else if (parsed.error && onError) {
-          onError(parsed.error)
-        }
-      } catch (e) {
-        console.error('Error parsing final chunk:', e)
-        if (onError) onError('Error parsing server response')
-      }
-    }
-  } catch (error) {
-    const message = errorMessage(error)
-    console.error('Stream request failed:', message)
-    if (onError) onError(message)
-  }
-}
+    const response = await fetch(`${backendBaseUrl}/query/stream`, {
+      method: 'POST',
+      headers: headers,
+      body: JSON.stringify(request),
+    });
+
+    if (!response.ok) {
+      // Handle HTTP errors (e.g., 4xx, 5xx)
+      let errorBody = 'Unknown error';
+      try {
+        errorBody = await response.text(); // Try to get error details from body
+      } catch { /* ignore */ }
+      throw new Error(`HTTP error ${response.status}: ${response.statusText}\n${errorBody}`);
+    }
+
+    if (!response.body) {
+      throw new Error('Response body is null');
+    }
+
+    const reader = response.body.getReader();
+    const decoder = new TextDecoder();
+    let buffer = '';
+
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) {
+        break; // Stream finished
+      }
+
+      // Decode the chunk and add to buffer
+      buffer += decoder.decode(value, { stream: true }); // stream: true handles multi-byte chars split across chunks
+
+      // Process complete lines (NDJSON)
+      const lines = buffer.split('\n');
+      buffer = lines.pop() || ''; // Keep potentially incomplete line in buffer
+
+      for (const line of lines) {
+        if (line.trim()) {
+          try {
+            const parsed = JSON.parse(line);
+            if (parsed.response) {
+              console.log('Received chunk:', parsed.response); // Log for debugging
+              onChunk(parsed.response);
+            } else if (parsed.error && onError) {
+              onError(parsed.error);
+            }
+          } catch (error) {
+            console.error('Error parsing stream chunk:', line, error);
+            if (onError) onError(`Error parsing server response: ${line}`);
+          }
+        }
+      }
+    }
+
+    // Process any remaining data in the buffer after the stream ends
+    if (buffer.trim()) {
+      try {
+        const parsed = JSON.parse(buffer);
+        if (parsed.response) {
+          onChunk(parsed.response);
+        } else if (parsed.error && onError) {
+          onError(parsed.error);
+        }
+      } catch (error) {
+        console.error('Error parsing final chunk:', buffer, error);
+        if (onError) onError(`Error parsing final server response: ${buffer}`);
+      }
+    }
+  } catch (error) {
+    const message = errorMessage(error);
+    console.error('Stream request failed:', message);
+    if (onError) {
+      onError(message);
+    } else {
+      // If no specific onError handler, maybe throw or log more prominently
+      console.error('Unhandled stream error:', message);
+    }
+  }
+};
 
 export const insertText = async (text: string): Promise<DocActionResponse> => {
   const response = await axiosInstance.post('/documents/text', { text })

View File

@@ -60,6 +60,7 @@ export default function RetrievalTesting() {
         }
         return newMessages
       })
+      scrollToBottom()
     }

     // Prepare query parameters
@@ -69,6 +70,7 @@ export default function RetrievalTesting() {
       query: userMessage.content,
       conversation_history: prevMessages
         .filter((m) => m.isError !== true)
+        .slice(-(state.querySettings.history_turns || 0) * 2)
         .map((m) => ({ role: m.role, content: m.content }))
     }