fixed file downloads (#261)
* fixed file downloads

* correct spec location

* remove logs

* ignore history folder

* remove console.log
jmagoon authored Nov 26, 2024
1 parent d65e310 commit d636faf
Showing 6 changed files with 53 additions and 8 deletions.
12 changes: 8 additions & 4 deletions frontend/server/execution.js
@@ -9,11 +9,13 @@ export async function syncExecutionResults(
executionUuid,
anvilConfiguration,
merkle,
spec,
) {
let s3Prefix;
let org;
if (anvilConfiguration.anvil.token) {
const data = atob(anvilConfiguration.anvil.token.split(".")[1]);
const org = JSON.parse(data).sub;
org = JSON.parse(data).sub;
s3Prefix = `${org}/${pipelineUuid}/${executionUuid}`;
} else {
s3Prefix = `${pipelineUuid}/${executionUuid}`;
@@ -28,10 +30,13 @@ export async function syncExecutionResults(
try {
// for downloading files
for (const blockKey in merkle_persed.blocks) {
const blockSpec = spec[blockKey];
if (!org || !blockSpec?.action?.container?.image) {
continue;
}
const block = merkle_persed.blocks[blockKey];
const blockPath = localPath.split("history")[0];
const blockName = blockKey.split("-").slice(0, -1).join("-");
const blocksS3Prefix = `${blockName}-${block.hash}-build`;
const blocksS3Prefix = `${org}/${blockSpec?.action?.container?.image}-${block.hash}-build`;
const localBlockDir = path.join(blockPath, blockKey);

if (!fs.existsSync(localBlockDir)) {
@@ -45,7 +50,6 @@
localBlockDir,
anvilConfiguration,
);
// console.log(`Downloaded folder: ${blocksS3Prefix} to ${localBlockDir}`);
} catch (err) {
console.error("Error downloading the folder:", err);
}
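Note on this change: `org` was previously declared with `const` inside the `if` block, so it never escaped that scope, and the per-block download prefix was built from the block key rather than from the org and the block's container image. The sketch below shows how the fixed prefix is derived, assuming the Anvil token is a standard JWT whose payload carries the organization in its `sub` claim (as the decoding in the diff implies); the function name is illustrative, not part of the commit.

```js
// Minimal sketch, not repository code. Assumes anvilConfiguration.anvil.token
// is a JWT whose payload `sub` claim holds the organization.
function buildBlockPrefix(anvilConfiguration, blockSpec, blockHash) {
  const token = anvilConfiguration?.anvil?.token;
  if (!token) return null; // no token -> no org-scoped prefix

  const payload = JSON.parse(atob(token.split(".")[1])); // decode the JWT payload
  const org = payload.sub;

  const image = blockSpec?.action?.container?.image;
  if (!org || !image) return null; // mirrors the `continue` guard in the download loop

  // e.g. "my-org/my-image-<hash>-build"
  return `${org}/${image}-${blockHash}-build`;
}
```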
38 changes: 37 additions & 1 deletion frontend/server/git.js
@@ -3,6 +3,24 @@ import fs from "fs";
import path from "path";
import { cacheJoin } from "./cache.js";

async function ensureGitignore(dir) {
const gitignorePath = path.join(dir, ".gitignore");
try {
// Check if .gitignore exists
await fs.promises.access(gitignorePath);
const content = await fs.promises.readFile(gitignorePath, "utf8");
if (!content.includes("history/")) {
await fs.promises.appendFile(gitignorePath, "\nhistory/\n");
return true; // indicates .gitignore was modified
}
return false; // indicates no changes were needed
} catch (e) {
// .gitignore doesn't exist, create it
await fs.promises.writeFile(gitignorePath, "history/\n", "utf8");
return true; // indicates .gitignore was created
}
}

export async function ensureGitRepoAndCommitBlocks(
buildContextStatuses,
buildPath,
@@ -14,7 +32,7 @@ export async function ensureGitRepoAndCommitBlocks(
await Promise.all(
buildContextStatuses
.filter((context) => {
context.hash != "";
return context.hash != "";
})
.map(async ({ blockKey }) => {
const sourcePath = path.join(buildPath, blockKey);
@@ -64,6 +82,24 @@ export async function ensureGitRepoAndCommitBlocks(
});
}

const gitignoreModified = await ensureGitignore(cachePath);

if (gitignoreModified) {
// Add and commit .gitignore if it was created or modified
await git.add({ fs, dir: cachePath, filepath: ".gitignore" });
await git.commit({
fs,
dir: cachePath,
message: isRepo
? "chore: update .gitignore"
: "chore: initial commit with .gitignore",
author: {
name: "Pipeline System",
email: "[email protected]",
},
});
}

// 3. Check for changes
const statusMatrix = await git.statusMatrix({ fs, dir: cachePath });
const hasChanges = statusMatrix.some(
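Two things worth noting in this file. First, the `.filter` callback previously had a block body with no `return`, so it always yielded `undefined` and filtered out every build context; the added `return` restores the intended behavior. Second, `ensureGitignore` is idempotent: it appends `history/` only when the entry is missing and reports whether anything changed, so the caller can skip an empty commit. A minimal usage sketch, assuming isomorphic-git (`git`) and Node's `fs` as used elsewhere in this file, with a placeholder author email:

```js
// Minimal sketch, assuming `ensureGitignore` from this file is in scope and
// `cachePath` points at an existing repository.
import fs from "fs";
import git from "isomorphic-git";

async function ignoreHistoryFolder(cachePath) {
  const changed = await ensureGitignore(cachePath); // appends "history/" only if missing
  if (!changed) return; // nothing to commit

  await git.add({ fs, dir: cachePath, filepath: ".gitignore" });
  await git.commit({
    fs,
    dir: cachePath,
    message: "chore: update .gitignore",
    author: { name: "Pipeline System", email: "pipeline@example.com" }, // placeholder email
  });
}
```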
5 changes: 4 additions & 1 deletion frontend/server/router.ts
@@ -285,21 +285,24 @@ export const appRouter = router({
pipelineUuid: z.string(),
executionUuid: z.string(),
anvilConfiguration: anvilConfigurationSchema,
merkle: z.string(),
merkle: z.string().optional(),
spec: z.any().optional(),
}),
async ({
pipelinePath,
pipelineUuid,
executionUuid,
anvilConfiguration,
merkle,
spec,
}) =>
syncExecutionResults(
pipelinePath,
pipelineUuid,
executionUuid,
anvilConfiguration,
merkle,
spec,
),
),

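With `merkle` and `spec` now optional in the input schema, the procedure can be called without a merkle tree (for example while a run is still in flight) and without a spec. A hedged sketch of a client-side call follows; the client object, the procedure name, and the use of `.mutate` are assumptions not shown in this diff, while the input fields follow the zod schema above.

```js
// Illustrative only: `trpcClient` and the procedure path are assumptions.
await trpcClient.syncExecutionResults.mutate({
  pipelinePath: "/path/to/pipeline",
  pipelineUuid: "pipeline-1234",
  executionUuid: "execution-5678",
  anvilConfiguration, // must match anvilConfigurationSchema
  merkle: undefined,  // optional while the run is in progress
  spec: pipeline.data, // optional; used to resolve each block's container image
});
```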
3 changes: 2 additions & 1 deletion frontend/server/s3.js
@@ -93,13 +93,14 @@ export async function uploadDirectory(key, diretoryPath, anvilConfiguration) {

async function upload(key, filePath, anvilConfiguration) {
const client = getClient(anvilConfiguration);
const s3key = getFullS3Key(key, anvilConfiguration);

try {
const fileBody = await fs.readFile(filePath);
const res = await client.send(
new PutObjectCommand({
Bucket: anvilConfiguration.s3.bucket,
Key: getFullS3Key(key, anvilConfiguration),
Key: s3key,
Body: fileBody,
}),
);
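Hoisting `getFullS3Key` out of the `try` block computes the destination key once and keeps it in scope outside the `PutObjectCommand`, for example for error reporting. A small sketch of that pattern; the `catch` body here is illustrative and not part of the commit, and `getClient` / `getFullS3Key` are the file-local helpers shown above.

```js
// Sketch only: assumes fs is the promise-based API and the local helpers exist.
import { PutObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs/promises";

async function upload(key, filePath, anvilConfiguration) {
  const client = getClient(anvilConfiguration);
  const s3key = getFullS3Key(key, anvilConfiguration);

  try {
    const fileBody = await fs.readFile(filePath);
    await client.send(
      new PutObjectCommand({
        Bucket: anvilConfiguration.s3.bucket,
        Key: s3key,
        Body: fileBody,
      }),
    );
  } catch (err) {
    // Illustrative: with s3key hoisted, the log can name the exact destination.
    console.error(`Failed to upload ${filePath} as ${s3key}:`, err);
    throw err;
  }
}
```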
2 changes: 1 addition & 1 deletion frontend/src/components/ui/WorkspaceFetcher.jsx
@@ -58,7 +58,7 @@ export default function WorkspaceFetcher() {
setPipelines(updatedPipelines);
if (isActive) {
try {
const merkle = "undefined"; // while running Merkle should be a blank something.
const merkle = undefined; // while running Merkle should be a blank something.
await syncResults(key, merkle);
// TODO:
// Surface sync errors
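The original line set `merkle` to the string `"undefined"`, which is truthy and passes a string schema even though it carries no merkle data; the fix passes the value `undefined`, which the router now accepts because `merkle` is `z.string().optional()`. A tiny illustrative comparison, not repository code:

```js
// Illustrative only.
const asString = "undefined";
const asValue = undefined;

Boolean(asString); // true  -> guards like `if (merkle)` would still run
Boolean(asValue);  // false -> such guards are skipped while the run is in flight

JSON.parse(asString); // throws SyntaxError: "undefined" is not valid JSON
```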
1 change: 1 addition & 0 deletions frontend/src/hooks/useExecutionResults.jsx
@@ -16,6 +16,7 @@ export const useSyncExecutionResults = () => {
executionUuid: pipeline.record.Execution,
anvilConfiguration: configuration,
merkle: merkle,
spec: pipeline.data,
});
};

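Passing `spec: pipeline.data` gives the server the pipeline specification it needs to resolve each block's container image in `syncExecutionResults` (the `spec[blockKey]?.action?.container?.image` lookup shown earlier). The exact shape of `pipeline.data` is not part of this diff; the sketch below is an assumed minimal shape inferred from that lookup.

```js
// Assumed minimal shape of `spec` (i.e. pipeline.data), inferred from the
// spec[blockKey]?.action?.container?.image lookup in execution.js; illustrative only.
const spec = {
  "my-block-abc123": {
    action: {
      container: {
        image: "my-block-image",
      },
    },
  },
};
```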
