
✨ Also track progress of regular uploads (#268)
coyotte508 authored Oct 24, 2023
1 parent 59e9e8a commit b960622
Showing 2 changed files with 85 additions and 72 deletions.
134 changes: 79 additions & 55 deletions packages/hub/src/lib/commit.ts
@@ -423,65 +423,89 @@ export async function* commitIter(params: CommitParams): AsyncGenerator<CommitPr

  yield { event: "phase", phase: "committing" };

-  const res = await (params.fetch ?? fetch)(
-    `${params.hubUrl ?? HUB_URL}/api/${repoId.type}s/${repoId.name}/commit/${encodeURIComponent(
-      params.branch ?? "main"
-    )}` + (params.isPullRequest ? "?create_pr=1" : ""),
-    {
-      method: "POST",
-      headers: {
-        ...(params.credentials && { Authorization: `Bearer ${params.credentials.accessToken}` }),
-        "Content-Type": "application/x-ndjson",
-      },
-      body: [
+  return yield* eventToGenerator<CommitProgressEvent, CommitOutput>(
+    async (yieldCallback, returnCallback, rejectCallback) =>
+      (params.fetch ?? fetch)(
+        `${params.hubUrl ?? HUB_URL}/api/${repoId.type}s/${repoId.name}/commit/${encodeURIComponent(
+          params.branch ?? "main"
+        )}` + (params.isPullRequest ? "?create_pr=1" : ""),
-        {
-          key: "header",
-          value: {
-            summary: params.title,
-            description: params.description,
-            parentCommit: params.parentCommit,
-          } satisfies ApiCommitHeader,
-        },
-        ...((await Promise.all(
-          allOperations.map((operation) => {
-            if (isFileOperation(operation)) {
-              const sha = lfsShas.get(operation.path);
-              if (sha) {
-                return {
-                  key: "lfsFile",
-                  value: {
-                    path: operation.path,
-                    algo: "sha256",
-                    size: operation.content.size,
-                    oid: sha,
-                  } satisfies ApiCommitLfsFile,
-                };
-              }
-            }

-            return convertOperationToNdJson(operation);
-          })
-        )) satisfies ApiCommitOperation[]),
-      ]
-        .map((x) => JSON.stringify(x))
-        .join("\n"),
-    }
-  );
method: "POST",
headers: {
...(params.credentials && { Authorization: `Bearer ${params.credentials.accessToken}` }),
"Content-Type": "application/x-ndjson",
},
body: [
{
key: "header",
value: {
summary: params.title,
description: params.description,
parentCommit: params.parentCommit,
} satisfies ApiCommitHeader,
},
...((await Promise.all(
allOperations.map((operation) => {
if (isFileOperation(operation)) {
const sha = lfsShas.get(operation.path);
if (sha) {
return {
key: "lfsFile",
value: {
path: operation.path,
algo: "sha256",
size: operation.content.size,
oid: sha,
} satisfies ApiCommitLfsFile,
};
}
}

-  if (!res.ok) {
-    throw await createApiError(res);
-  }
+                return convertOperationToNdJson(operation);
+              })
+            )) satisfies ApiCommitOperation[]),
+          ]
+            .map((x) => JSON.stringify(x))
+            .join("\n"),
+          ...({
+            progressHint: {
+              progressCallback: (progress: number) => {
+                // For now, we display equal progress for all files
+                // We could compute the progress based on the size of `convertOperationToNdJson` for each of the files instead
+                for (const op of allOperations) {
+                  if (isFileOperation(op) && !lfsShas.has(op.path)) {
+                    yieldCallback({
+                      event: "fileProgress",
+                      path: op.path,
+                      progress,
+                      type: "uploading",
+                    });
+                  }
+                }
+              },
+            },
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
+          } as any),
+        }
+      )
+        .then(async (res) => {
+          if (!res.ok) {
+            throw await createApiError(res);
+          }

-  const json = await res.json();
+          const json = await res.json();

-  return {
-    pullRequestUrl: json.pullRequestUrl,
-    commit: {
-      oid: json.commitOid,
-      url: json.commitUrl,
-    },
-    hookOutput: json.hookOutput,
-  };
+          returnCallback({
+            pullRequestUrl: json.pullRequestUrl,
+            commit: {
+              oid: json.commitOid,
+              url: json.commitUrl,
+            },
+            hookOutput: json.hookOutput,
+          });
+        })
+        .catch(rejectCallback)
+  );
}

export async function commit(params: CommitParams): Promise<CommitOutput> {
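The key change in commit.ts is that the commit POST request is now wrapped in `eventToGenerator`, which bridges callback-style progress reporting (the `progressHint.progressCallback` passed through the fetch init) into `fileProgress` events yielded by `commitIter`. The helper itself lives elsewhere in the package and is not shown in this diff; the sketch below is a minimal illustration of such a callbacks-to-AsyncGenerator bridge — the name `eventToGeneratorSketch` and its buffering strategy are assumptions for illustration, not the package's actual implementation.

// Hypothetical sketch of an eventToGenerator-style helper (not the actual implementation):
// values passed to yieldCallback are buffered and yielded in order, returnCallback ends
// the generator with a return value, and rejectCallback makes the generator throw.
async function* eventToGeneratorSketch<YieldType, ReturnType>(
  cb: (
    yieldCallback: (y: YieldType) => void,
    returnCallback: (r: ReturnType) => void,
    rejectCallback: (reason?: unknown) => void
  ) => unknown
): AsyncGenerator<YieldType, ReturnType> {
  type QueueItem = { done: false; value: YieldType } | { done: true; value: ReturnType } | { error: unknown };
  const queue: QueueItem[] = [];
  let notify: (() => void) | undefined;
  const push = (item: QueueItem) => {
    queue.push(item);
    notify?.();
    notify = undefined;
  };
  // Kick off the callback-style producer; failures are expected to arrive via rejectCallback.
  void cb(
    (y) => push({ done: false, value: y }),
    (r) => push({ done: true, value: r }),
    (error) => push({ error })
  );
  while (true) {
    if (queue.length === 0) {
      await new Promise<void>((resolve) => (notify = resolve));
    }
    const item = queue.shift();
    if (!item) continue;
    if ("error" in item) throw item.error;
    if (item.done) return item.value;
    yield item.value;
  }
}

Whatever the real helper looks like, the commit code above only relies on three guarantees: values passed to `yieldCallback` are yielded in order, `returnCallback` produces the generator's return value, and a rejection funneled through `.catch(rejectCallback)` surfaces as a thrown error.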
23 changes: 6 additions & 17 deletions packages/hub/src/lib/upload-files-with-progress.ts
@@ -4,13 +4,8 @@ import { commitIter } from "./commit";
const multipartUploadTracking = new WeakMap<
  (progress: number) => void,
  {
-    paths: Record<
-      string,
-      {
-        numParts: number;
-        partsProgress: Record<number, number>;
-      }
-    >;
+    numParts: number;
+    partsProgress: Record<number, number>;
  }
>();

@@ -67,7 +62,6 @@ export async function* uploadFilesWithProgress(params: {
}

const progressHint = init.progressHint as {
-  path: string;
  progressCallback: (progress: number) => void;
} & (Record<string, never> | { part: number; numParts: number });
const progressCallback = init.progressCallback as (progress: number) => void;
@@ -79,20 +73,15 @@
if (progressHint.part !== undefined) {
  let tracking = multipartUploadTracking.get(progressCallback);
  if (!tracking) {
-    tracking = { paths: {} };
+    tracking = { numParts: progressHint.numParts, partsProgress: {} };
    multipartUploadTracking.set(progressCallback, tracking);
  }
-  const path = progressHint.path;
-  if (!tracking.paths[path]) {
-    tracking.paths[path] = { numParts: progressHint.numParts, partsProgress: {} };
-  }
-  const pathTracking = tracking.paths[path];
-  pathTracking.partsProgress[progressHint.part] = event.loaded / event.total;
+  tracking.partsProgress[progressHint.part] = event.loaded / event.total;
  let totalProgress = 0;
-  for (const partProgress of Object.values(pathTracking.partsProgress)) {
+  for (const partProgress of Object.values(tracking.partsProgress)) {
    totalProgress += partProgress;
  }
-  progressCallback(totalProgress / pathTracking.numParts);
+  progressCallback(totalProgress / tracking.numParts);
} else {
  progressCallback(event.loaded / event.total);
}
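Taken together, the two files mean that regular (non-LFS) uploads now emit `fileProgress` events with `type: "uploading"`, and multipart LFS progress is tracked per progress callback rather than per path. A consumer can observe the events by iterating the generator. The sketch below is a usage illustration only: the `repo` and `files` parameter shapes, and the import from the package root, are assumptions; only `credentials.accessToken` and the event fields (`event`, `phase`, `path`, `progress`, `type`) are confirmed by this diff.

import { uploadFilesWithProgress } from "@huggingface/hub"; // assumes the function is exported from the package root

// Hypothetical usage sketch: log progress for every file in an upload.
// The repo/files parameter shapes below are assumed for illustration.
for await (const event of uploadFilesWithProgress({
  repo: { type: "model", name: "my-user/my-model" },
  credentials: { accessToken: "hf_..." },
  files: [new File(["hello"], "README.md")],
})) {
  if (event.event === "phase") {
    console.log("phase:", event.phase); // e.g. "committing"
  } else if (event.event === "fileProgress") {
    // With this commit, regular uploads report these events too, not only LFS files.
    console.log(`${event.path}: ${Math.round(event.progress * 100)}% (${event.type})`);
  }
}

Note that `progress` is a fraction between 0 and 1, and, per the comment added in commit.ts, all non-LFS files in a commit currently report the same progress value rather than per-file byte counts.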
