Skip to content

Commit

Permalink
Merge release into embed
Browse files Browse the repository at this point in the history
  • Loading branch information
andrew-polk committed Dec 20, 2024
2 parents 6fc4c88 + c0e30c4 commit 942ad40
Show file tree
Hide file tree
Showing 2 changed files with 90 additions and 11 deletions.
90 changes: 83 additions & 7 deletions .github/workflows/build-and-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,86 @@ jobs:
- name: Run tests
run: yarn test:ci

# Not yet
# - name: Deploy to S3
# run: |
# aws s3 cp path/to/build/artifacts s3://your-bucket-name --recursive
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Deploy to S3 - alpha.bloomlibrary.org
if: github.ref == 'refs/heads/master'
run: |
aws s3 rm s3://alpha.bloomlibrary.org --recursive
# Copy everything that should be cached for a long time: contents of static directory and bloom-player directory, except bloom-player.htm itself.
# These things are safe to cache because the build inserts a hash into their names. Any update will cause a different file to be referenced.
aws s3 cp build s3://alpha.bloomlibrary.org --recursive --cache-control max-age=31536000 --exclude "*" --include "static/*" --include "bloom-player/*" --exclude "*.htm"
# Copy everything else, which should not be cached: the root directory files, including index.html, and any htm files, especially bloom-player's
aws s3 cp build s3://alpha.bloomlibrary.org --recursive --cache-control no-cache --exclude "static/*" --exclude "bloom-player/*" --include "*.htm"
# publish the alpha version of the script that helps with managing history on embedded sites
# note that this file's name does not carry the cache-defeating hash, so it must not be cached
aws s3 cp public/embed-bloomlibrary.js s3://share.bloomlibrary.org/alpha-assets/embed-bloomlibrary.js --cache-control no-cache
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_SECRET_ACCESS_KEY }}

- name: Deploy to S3 - dev-alpha.bloomlibrary.org
if: github.ref == 'refs/heads/master'
run: |
aws s3 rm s3://dev-alpha.bloomlibrary.org --recursive
# Copy everything that should be cached for a long time: contents of static directory and bloom-player directory, except bloom-player.htm itself.
# These things are safe to cache because the build inserts a hash into their names. Any update will cause a different file to be referenced.
aws s3 cp build s3://dev-alpha.bloomlibrary.org --recursive --cache-control max-age=31536000 --exclude "*" --include "static/*" --include "bloom-player/*" --exclude "*.htm"
# Copy everything else, which should not be cached: the root directory files, including index.html, and any htm files, especially bloom-player's
aws s3 cp build s3://dev-alpha.bloomlibrary.org --recursive --cache-control no-cache --exclude "static/*" --exclude "bloom-player/*" --include "*.htm"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_SECRET_ACCESS_KEY }}

- name: Deploy to S3 - bloomlibrary.org
if: github.ref == 'refs/heads/release'
run: |
# aws s3 rm s3://bloomlibrary.org --exclude "*/*" --include "bloom-player/*" --include "static/*" --include "translations/*" --recursive
# Copy everything that should be cached for a long time: contents of static directory and bloom-player directory, except bloom-player.htm itself.
# These things are safe to cache because the build inserts a hash into their names. Any update will cause a different file to be referenced.
# Please keep the --exclude robots.txt at the end of both commands! If published, it will prevent search engines from indexing us.
# aws s3 cp build s3://bloomlibrary.org --recursive --cache-control max-age=31536000 --exclude "*" --include "static/*" --include "bloom-player/*" --exclude "*.htm" --exclude "robots.txt"
# Copy everything else, which should not be cached: the root directory files, including index.html, and any htm files, especially bloom-player's
# aws s3 cp build s3://bloomlibrary.org --recursive --cache-control no-cache --exclude "static/*" --exclude "bloom-player/*" --include "*.htm" --exclude "robots.txt"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SIL_BLOOM_UPLOADER_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SIL_BLOOM_UPLOADER_AWS_SECRET_ACCESS_KEY }}

- name: Deploy to S3 - dev.bloomlibrary.org
if: github.ref == 'refs/heads/release'
run: |
aws s3 rm s3://dev.bloomlibrary.org --exclude "*/*" --include "bloom-player/*" --include "static/*" --include "translations/*" --recursive
# Copy everything that should be cached for a long time: contents of static directory and bloom-player directory, except bloom-player.htm itself.
# These things are safe to cache because the build inserts a hash into their names. Any update will cause a different file to be referenced.
aws s3 cp build s3://dev.bloomlibrary.org --recursive --cache-control max-age=31536000 --exclude "*" --include "static/*" --include "bloom-player/*" --exclude "*.htm"
# Copy everything else, which should not be cached: the root directory files, including index.html, and any htm files, especially bloom-player's
aws s3 cp build s3://dev.bloomlibrary.org --recursive --cache-control no-cache --exclude "static/*" --exclude "bloom-player/*" --include "*.htm"
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_SECRET_ACCESS_KEY }}

- name: Deploy to S3 - embed.bloomlibrary.org
if: github.ref == 'refs/heads/embed'
run: |
aws s3 rm s3://embed.bloomlibrary.org --recursive
# Copy everything that should be cached for a long time: contents of static directory and bloom-player directory, except bloom-player.htm itself.
# These things are safe to cache because the build inserts a hash into their names. Any update will cause a different file to be referenced.
aws s3 cp build s3://embed.bloomlibrary.org --recursive --cache-control max-age=31536000 --exclude "*" --include "static/*" --include "bloom-player/*" --exclude "*.htm"
# Copy everything else, which should not be cached: the root directory files, including index.html, and any htm files, especially bloom-player's
aws s3 cp build s3://embed.bloomlibrary.org --recursive --cache-control no-cache --exclude "static/*" --exclude "bloom-player/*" --include "*.htm"
# publish the production version of the script that helps with managing history on embedded sites
# note that this file's name does not carry the cache-defeating hash, so it must not be cached
aws s3 cp public/embed-bloomlibrary.js s3://share.bloomlibrary.org/assets/embed-bloomlibrary.js --cache-control no-cache
env:
AWS_ACCESS_KEY_ID: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.SIL_LEAD_BLOOM_LIBRARY_DEPLOY_AWS_SECRET_ACCESS_KEY }}
11 changes: 7 additions & 4 deletions src/model/Book.ts
Original file line number Diff line number Diff line change
Expand Up @@ -352,7 +352,8 @@ export class Book {

public getBestTitle(langISO?: string): string {
    // Prefer the title recorded for the requested language, if any;
    // with no language given, start from the official title directly.
    const candidate = langISO ? this.allTitles.get(langISO) : this.title;
    // Fall back to the official title when allTitles had no entry
    // (or an empty one) for that language.
    const chosen = candidate || this.title;
    // Titles can contain embedded CR/LF/vertical-tab breaks; collapse
    // each run of them into a single space for display.
    return chosen.replace(/[\r\n\v]+/g, " ");
}

// Passed a restrictionType that is one of the field names in IInternetLimits
Expand Down Expand Up @@ -570,11 +571,12 @@ export function getBestBookTitle(
rawAllTitlesJson: string,
contextLangTag?: string
): string {
if (!contextLangTag) return defaultTitle;
if (!contextLangTag) return defaultTitle.replace(/[\r\n\v]+/g, " ");

// enhance: could we do this faster with just a regular expression?
const map = parseAllTitles(rawAllTitlesJson);
return map.get(contextLangTag) || defaultTitle;
const contextTitle = map.get(contextLangTag);
return (contextTitle || defaultTitle).replace(/[\r\n\v]+/g, " ");
}

export function getBookTitleInLanguageOrUndefined(
Expand All @@ -583,7 +585,8 @@ export function getBookTitleInLanguageOrUndefined(
): string | undefined {
// enhance: could we do this faster with just a regular expression?
const map = parseAllTitles(bookInfo.allTitles);
return map.get(contextLangTag);
const contextTitle = map.get(contextLangTag);
return contextTitle?.replace(/[\r\n\v]+/g, " ");
}

function parseAllTitles(allTitlesString: string): Map<string, string> {
Expand Down

0 comments on commit 942ad40

Please sign in to comment.