Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
250 changes: 231 additions & 19 deletions public/sw.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
/* eslint-disable no-restricted-syntax */
/* eslint-disable no-useless-return */
/* eslint-disable import/prefer-default-export */
/* eslint-disable no-unused-vars */
Expand All @@ -10,14 +11,86 @@ const cacheName = 'simorghCache_v4';
const pwaClients = new Map();
let isPWADeviceOffline = false;

// --- IndexedDB helpers ---
// Constants for the IndexedDB database that tracks offline-article cache metadata
// (stored alongside the Cache API entries, which hold the actual responses).
const DB_NAME = 'simorghOfflineDB';
const STORE_NAME = 'cachedArticles'; // per-article metadata records, keyed by URL
const MAX_ARTICLE_AGE_MS = 72 * 60 * 60 * 1000; // 72 hours — max age before a cached article is considered stale
const REFRESH_INTERVAL_MS = 24 * 60 * 60 * 1000; // 24 hours — minimum interval between article-cache syncs

// Shared promise for the IndexedDB connection. Memoised so every db helper
// reuses one handle instead of opening a new, never-closed connection per
// operation (the previous version leaked a connection on each call).
let dbPromise = null;

// Opens (or reuses) the offline-metadata database, creating the object
// stores on first use. Returns a Promise resolving to the IDBDatabase.
const openDB = () => {
  if (dbPromise) return dbPromise;
  dbPromise = new Promise((resolve, reject) => {
    const req = indexedDB.open(DB_NAME, 1);
    req.onupgradeneeded = e => {
      const db = e.target.result;
      if (!db.objectStoreNames.contains(STORE_NAME)) {
        db.createObjectStore(STORE_NAME, { keyPath: 'url' });
      }
      if (!db.objectStoreNames.contains('meta')) {
        db.createObjectStore('meta', { keyPath: 'key' });
      }
    };
    req.onsuccess = e => {
      const db = e.target.result;
      // If another context requests a schema upgrade, release our handle so
      // the next call reopens at the new version instead of blocking it.
      db.onversionchange = () => {
        db.close();
        dbPromise = null;
      };
      resolve(db);
    };
    req.onerror = e => {
      // Do not cache the failure — allow a retry on the next call.
      dbPromise = null;
      reject(e.target.error);
    };
  });
  return dbPromise;
};

// Reads a single record by key from the given object store.
// Resolves with the record, or undefined when the key is absent.
const dbGet = async (store, key) => {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const request = db
      .transaction(store, 'readonly')
      .objectStore(store)
      .get(key);
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
};

// Inserts or replaces a record in the given object store.
// The record's key comes from the store's keyPath.
const dbPut = async (store, value) => {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const request = db
      .transaction(store, 'readwrite')
      .objectStore(store)
      .put(value);
    request.onsuccess = () => resolve();
    request.onerror = () => reject(request.error);
  });
};

// Reads every record from the given object store.
// Resolves with an array (empty when the store has no records).
const dbGetAll = async store => {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const request = db
      .transaction(store, 'readonly')
      .objectStore(store)
      .getAll();
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  });
};

// Removes the record with the given key from the object store.
// Resolves once the delete request succeeds (no-op for a missing key).
const dbDelete = async (store, key) => {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const request = db
      .transaction(store, 'readwrite')
      .objectStore(store)
      .delete(key);
    request.onsuccess = () => resolve();
    request.onerror = () => reject(request.error);
  });
};
Comment on lines +67 to +74
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
const db = await openDB();
return new Promise((resolve, reject) => {
const tx = db.transaction(store, 'readwrite');
const req = tx.objectStore(store).delete(key);
req.onsuccess = () => resolve();
req.onerror = e => reject(e.target.error);
});
};
const db = await openDB();
const tx = db.transaction(store, 'readwrite');
await tx.objectStore(store).delete(key);
};

Is there a reason why we need all these promise wrappers around these db operations?


// --------------------
// Helper Functions
// --------------------

// Toggle for the service-worker debug logging below.
const loggerEnabled = true;
// Timestamp captured when this worker script was evaluated.
// NOTE(review): not referenced in this visible chunk — confirm it is used elsewhere.
const generatedTimestamp = new Date().toISOString();

// Console logger prefixed with the worker version; no-op when loggerEnabled
// is false. `version` is declared elsewhere in this file.
const logger = (...args) => {
  if (!loggerEnabled) return;
  // eslint-disable-next-line no-console
  console.log(`[SW ${version}]`, ...args);
};

const getServiceFromUrl = url => new URL(url).pathname.split('/')[1];
const getOfflinePageUrl = service => `/${service}/offline`;

const cacheResource = async (cache, url) => {
logger('cacheResource', { url });
try {
const response = await fetch(url);
if (response.ok) await cache.put(url, response.clone());
Expand All @@ -27,16 +100,10 @@ const cacheResource = async (cache, url) => {
}
};

const cacheOfflinePageAndResources = async service => {
const cache = await caches.open(cacheName);
const offlinePageUrl = new URL(
getOfflinePageUrl(service),
self.location.origin,
).href;

if (await cache.match(offlinePageUrl)) return;
const cachePageAndResources = async (cache, url, forceRefresh = false) => {
if (!forceRefresh && (await cache.match(url))) return;

const resp = await cacheResource(cache, offlinePageUrl);
const resp = await cacheResource(cache, url);
if (!resp || !resp.ok) return;

const html = await resp.text();
Expand All @@ -48,7 +115,104 @@ const cacheOfflinePageAndResources = async service => {
);

const resources = [...scriptSrcs, ...linkHrefs].filter(Boolean);
await Promise.allSettled(resources.map(url => cacheResource(cache, url)));
await Promise.allSettled(resources.map(r => cacheResource(cache, r)));
};

// Caches a service's offline page (and, via cachePageAndResources, the
// scripts/styles it references) in the shared cache.
// `forceRefresh` re-fetches even when a cached copy already exists.
const cacheOfflinePageAndResources = async (service, forceRefresh = false) => {
  const pageUrl = new URL(getOfflinePageUrl(service), self.location.origin);
  const cache = await caches.open(cacheName);
  await cachePageAndResources(cache, pageUrl.href, forceRefresh);
};

// Extracts the embedded most-read JSON payload from the cached offline page
// (the <script id="most-read-data"> element). Returns the parsed object, or
// null when the page is not cached, the script tag is absent, or the JSON
// fails to parse.
const getMostReadDataFromOfflinePage = async service => {
  const cache = await caches.open(cacheName);
  const pageUrl = new URL(getOfflinePageUrl(service), self.location.origin).href;

  const cachedPage = await cache.match(pageUrl);
  if (!cachedPage) return null;

  const markup = await cachedPage.text();
  const scriptMatch = markup.match(
    /<script[^>]*id="most-read-data"[^>]*>(.+?)<\/script>/s,
  );
  if (!scriptMatch) return null;

  try {
    return JSON.parse(scriptMatch[1]);
  } catch {
    return null;
  }
};

// Refreshes the offline article cache for a service from its most-read data.
// Throttled to once per REFRESH_INTERVAL_MS; evicts entries that are both
// older than MAX_ARTICLE_AGE_MS and no longer in the most-read list.
// TODO(review): lastArticleSync is a single global key — consider storing it
// per service (e.g. `lastArticleSync_${service}`) so one service's sync does
// not suppress another's.
const cacheArticles = async service => {
  const lastSync = await dbGet('meta', 'lastArticleSync');
  const now = Date.now();
  logger('📌 cacheArticles called', { lastSync });

  if (lastSync && now - lastSync.value < REFRESH_INTERVAL_MS) return;

  const mostRead = await getMostReadDataFromOfflinePage(service);

  logger(`👀 fetched:`, { mostRead });

  const mostReadArticles = mostRead?.items;
  if (!mostReadArticles?.length) return;

  const cache = await caches.open(cacheName);
  const mostReadUrls = new Set(mostReadArticles.map(a => a.href));

  // Delete stale articles not in most-read and older than 72h.
  // BUG FIX: records are stored with keyPath 'url' (see dbPut below), so the
  // metadata field is entry.url — the previous entry.href was always
  // undefined, which meant stale articles were never actually evicted.
  const cachedArticleMeta = await dbGetAll(STORE_NAME);
  const staleEntries = cachedArticleMeta.filter(entry => {
    const isTooOld = now - entry.cachedAt > MAX_ARTICLE_AGE_MS;
    const isStillMostRead = mostReadUrls.has(entry.url);
    return isTooOld && !isStillMostRead;
  });

  await Promise.allSettled(
    staleEntries.flatMap(entry => [
      // Articles were cached under the absolute URL, so resolve before deleting.
      cache.delete(new URL(entry.url, self.location.origin).href),
      dbDelete(STORE_NAME, entry.url),
    ]),
  );

  // Cache new or updated articles including their scripts and stylesheets
  const existingMeta = await Promise.all(
    mostReadArticles.map(article => dbGet(STORE_NAME, article.href)),
  );

  await Promise.allSettled(
    mostReadArticles.map(async (article, i) => {
      const existing = existingMeta[i];

      const isCached = !!existing;
      const isOutdated =
        isCached &&
        !!article.timestamp &&
        existing.timestamp !== article.timestamp;

      // Nothing to do for an article that is cached and still current.
      if (isCached && !isOutdated) return;

      const articleUrl = new URL(article.href, self.location.origin).href;
      // Force refresh only when we know it's outdated, not when simply missing
      await cachePageAndResources(cache, articleUrl, isOutdated);
      await dbPut(STORE_NAME, {
        url: article.href,
        timestamp: article.timestamp,
        cachedAt: now,
      });
    }),
  );

  await dbPut('meta', { key: 'lastArticleSync', value: now });
};

const CACHEABLE_FILES = [
Expand Down Expand Up @@ -88,13 +252,35 @@ self.addEventListener('activate', event => {
// -------Message Event-------------
// Handles PWA_STATUS messages from pages: records the client as a PWA,
// persists the per-service offline-article toggle, and warms the offline
// page (and, when enabled and due, the article cache).
self.addEventListener('message', async event => {
  if (event.data?.type !== 'PWA_STATUS') return;

  const { isPWA, offlineArticle } = event.data;
  const { isEnabled: isOfflineArticleEnabled, service } = offlineArticle ?? {};

  logger('📌 Message', { isOfflineArticleEnabled, service });

  if (!isPWA) return;

  pwaClients.set(event.source.id, true);

  // Persist the toggle so the fetch handler can honour it while offline.
  await dbPut('meta', {
    key: `offlineArticleEnabled_${service}`,
    value: !!isOfflineArticleEnabled,
  });

  if (!isOfflineArticleEnabled) {
    // Still cache the offline page itself regardless of article toggle
    await cacheOfflinePageAndResources(service);
    return;
  }

  const lastSync = await dbGet('meta', 'lastArticleSync');
  const isOutdated =
    !lastSync || Date.now() - lastSync.value >= REFRESH_INTERVAL_MS;

  // Always cache offline page first — cacheArticles depends on it
  await cacheOfflinePageAndResources(service, isOutdated);

  if (isOutdated) {
    await cacheArticles(service);
  }
});
Expand Down Expand Up @@ -143,6 +329,7 @@ const fetchEventHandler = async event => {
);
} else if (isNavigationMode) {
const { url } = event.request;
logger('isNavigationMode', { url });
event.respondWith(
(async () => {
const client = await self.clients.get(event.clientId);
Expand All @@ -152,20 +339,45 @@ const fetchEventHandler = async event => {
// Offline fallback for PWA navigations (closure over `url`, `cache`, `isPWA`
// from the enclosing fetch handler). Preference order: a fresh cached copy of
// the requested article (when the per-service toggle is on), then the
// service's cached offline page, then a network-error response.
const getOfflineFallback = async () => {
  if (isPWA) {
    const service = getServiceFromUrl(url);

    const toggleRecord = await dbGet(
      'meta',
      `offlineArticleEnabled_${service}`,
    );

    if (toggleRecord?.value ?? false) {
      const storedArticle = await cache.match(url);

      if (storedArticle) {
        // Only serve the article if its metadata exists and it is within
        // the maximum cached age.
        const meta = await dbGet(STORE_NAME, url);
        const isFresh =
          !!meta && Date.now() - meta.cachedAt <= MAX_ARTICLE_AGE_MS;

        if (isFresh) {
          logger('🎉 cachedArticle', { url });
          isPWADeviceOffline = true;
          return storedArticle;
        }

        logger('⚠️ cachedArticle is outdated, skipping', { url });
      }
    }

    // Offline page fallback is always available regardless of toggle
    const offlinePage = await cache.match(
      new URL(getOfflinePageUrl(service), self.location.origin).href,
    );

    if (offlinePage) {
      isPWADeviceOffline = true;
      return offlinePage;
    }
  }

  // If offline page/article not available, return error response
  return Response.error();
};

Expand Down
35 changes: 33 additions & 2 deletions src/app/components/Image/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -55,11 +55,11 @@ const Image = ({
hasCaption,
isPortraitOrientation,
}: PropsWithChildren<ImageProps>) => {
const { pageType, isLite, isAmp } = use(RequestContext);
const { pageType, isLite, isAmp, isOfflineMode } = use(RequestContext);
const [isLoaded, setIsLoaded] = useState(false);
if (isLite) return null;

const showPlaceholder = placeholder && !isLoaded;
const showPlaceholder = placeholder && (!isLoaded || isOfflineMode);
const hasDimensions = width && height;
const hasFixedAspectRatio = !!aspectRatio || !!hasDimensions;
const [aspectRatioX, aspectRatioY] = aspectRatio ||
Expand Down Expand Up @@ -88,6 +88,37 @@ const Image = ({
};
const imgSrcSet = getImgSrcSet();
const imgSizes = getImgSizes();

// TODO: TBC how the images should be shown
if (isOfflineMode) {
return (
<div
className={className}
css={theme => [
styles.wrapper,
hasFixedAspectRatio
? styles.wrapperFixedAspectRatio
: styles.wrapperResponsiveRatio,
isPortraitOrientation && styles.portraitOrientation,
placeholder && [
styles.placeholder,
{
backgroundColor: darkPlaceholder
? theme.palette.SHADOW
: theme.palette.LUNAR,
},
],
]}
style={{
paddingBottom: hasFixedAspectRatio ? legacyBrowserAspectRatio : 0,
...(!hasCaption && { overflow: 'hidden' }),
}}
>
{children}
</div>
);
}

return (
<>
{preload && (
Expand Down
Loading
Loading