Skip to content

Commit

Permalink
FIX: manually seek position in file chunk based on last position
Browse files Browse the repository at this point in the history
reason: Bun.file.slice().stream() does not start streaming according to offset
see: oven-sh/bun#7057
  • Loading branch information
binya committed Aug 9, 2024
1 parent 7d38adc commit 8d82cff
Show file tree
Hide file tree
Showing 2 changed files with 51 additions and 12 deletions.
1 change: 1 addition & 0 deletions components/Table.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ function getHeaders(row: NestedRow) {
}

export function buildRows(rows: NestedRow[]) {
if (!rows.length) return [];
const headers = getHeaders(rows[0]);
return (
rows.map((row, i) => (
Expand Down
62 changes: 50 additions & 12 deletions index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -103,35 +103,73 @@ async function handleHttpRequest(req: Request, server: Server) {
}

const decoder = new TextDecoder();
async function getFileAsJsonObjectsArray(file: BunFile, size = 0) {
async function getFileAsJsonObjectsArray(file: BunFile, lastPosition = 0) {
if (!file) {
throw new Error("invalid file");
}
console.log('recieved size:', size)
const stream = file.slice(size, file.size).stream();

console.log('received lastPosition:', lastPosition);
if (lastPosition >= file.size) {
return [];
}

const stream = file.stream();
let remainingData = "";
const jsonObjects: NestedRow[] = [];
let bytesRead = 0;

// ReadableStream<Uint8Array> does have [Symbol.asyncIterator]()
//@ts-ignore
for await (const chunk of stream) {
const str = decoder.decode(chunk);
remainingData += str; // Append the chunk to the remaining data
// Split the remaining data by newline character
if (bytesRead + chunk.length <= lastPosition) {
bytesRead += chunk.length;
continue; // Skip this chunk if it's before lastPosition
}

let relevantChunk;
if (bytesRead < lastPosition) {
// If part of the chunk is before lastPosition, slice it
relevantChunk = chunk.slice(lastPosition - bytesRead);
bytesRead = lastPosition;
} else {
relevantChunk = chunk;
}

const str = decoder.decode(relevantChunk);
remainingData += str;

let lines = remainingData.split(/\r?\n/);
// Loop through each line, except the last one

while (lines.length > 1) {
// Remove the first line from the array and add it to the objects array
const line = lines.shift();
jsonObjects.push(JSON.parse(line || "{}"));
const line = lines.shift()?.trim();
if (line) {
try {
jsonObjects.push(JSON.parse(line));
} catch (error) {
console.error("Error parsing JSON:", error);
}
}
}
// Update the remaining data with the last incomplete line
remainingData = lines[0];

remainingData = lines[0] || "";
bytesRead += relevantChunk.length;
}

// Handle any remaining data
if (remainingData.trim()) {
try {
jsonObjects.push(JSON.parse(remainingData));
} catch (error) {
console.error("Error parsing JSON:", error);
}
}

return jsonObjects;
}




function layout(html = '', js: string = '') {
const withLayout = htmlLayoutFileText.replace('{css}', `<style>${generalCssFileText}</style>`).replace('{html}', html).replace('{js}', `<script>${js}</script>`);
return (
Expand Down

0 comments on commit 8d82cff

Please sign in to comment.