-
-
Notifications
You must be signed in to change notification settings - Fork 2.5k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
feat: add typegen for loaders #11358
Merged
Merged
Changes from 23 commits
Commits
Show all changes
30 commits
Select commit
Hold shift + click to select a range
aa1db49
wip
ascorbic a7639e3
wip
ascorbic 63a23d2
wip
ascorbic 624c377
Update demo
ascorbic d67ee7e
Add meta
ascorbic 631f3d2
wip
ascorbic 4b7f8f7
Add file loader
ascorbic 14330f7
Add schema validation
ascorbic b3b8dd4
Remove log
ascorbic 04d09f0
Merge branch 'main' into content-layer-loader
ascorbic c932fb8
Changeset
ascorbic 941c4be
Format
ascorbic f997ca5
Lockfile
ascorbic 92c3312
Fix type
ascorbic e0bd9cd
Merge branch 'main' into content-layer-loader
ascorbic 5cd5670
Handle loading for data store JSON
ascorbic 6ca356a
Merge branch 'main' into content-layer-loader
ascorbic 3fcdc59
Use rollup util to import JSON
ascorbic 14322d4
Fix types
ascorbic 3e21f03
Merge branch 'main' into content-layer-loader
ascorbic 8d310af
Format
ascorbic d8f2d6e
feat: add typegen for loaders
ascorbic 527073d
Change back to direct zod import
ascorbic 0a89f15
Merge branch 'content-layer' into content-layer-loader
ascorbic 51fcda3
Merge branch 'content-layer' into content-layer-loader
ascorbic 9de133f
Add tests
ascorbic 21a7c37
Merge branch 'content-layer-loader' into loader-types
ascorbic 6bb89bc
Changes from review
ascorbic 47649ca
Merge branch 'content-layer-loader' into loader-types
ascorbic e98c6d5
Merge branch 'content-layer' into loader-types
ascorbic File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
---
'astro': minor
---

Implements Content Layer
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,135 @@ | ||
import { promises as fs, type PathLike, existsSync } from 'fs'; | ||
export class DataStore { | ||
#collections = new Map<string, Map<string, any>>(); | ||
constructor() { | ||
this.#collections = new Map(); | ||
} | ||
get(collectionName: string, key: string) { | ||
return this.#collections.get(collectionName)?.get(String(key)); | ||
} | ||
entries(collectionName: string): IterableIterator<[id: string, any]> { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
return collection.entries(); | ||
} | ||
set(collectionName: string, key: string, value: any) { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
collection.set(String(key), value); | ||
this.#collections.set(collectionName, collection); | ||
} | ||
delete(collectionName: string, key: string) { | ||
const collection = this.#collections.get(collectionName); | ||
if (collection) { | ||
collection.delete(String(key)); | ||
} | ||
} | ||
clear(collectionName: string) { | ||
this.#collections.delete(collectionName); | ||
} | ||
|
||
has(collectionName: string, key: string) { | ||
const collection = this.#collections.get(collectionName); | ||
if (collection) { | ||
return collection.has(String(key)); | ||
} | ||
return false; | ||
} | ||
|
||
hasCollection(collectionName: string) { | ||
return this.#collections.has(collectionName); | ||
} | ||
|
||
collections() { | ||
return this.#collections; | ||
} | ||
|
||
scopedStore(collectionName: string): ScopedDataStore { | ||
return { | ||
get: (key: string) => this.get(collectionName, key), | ||
entries: () => this.entries(collectionName), | ||
set: (key: string, value: any) => this.set(collectionName, key, value), | ||
delete: (key: string) => this.delete(collectionName, key), | ||
clear: () => this.clear(collectionName), | ||
has: (key: string) => this.has(collectionName, key), | ||
}; | ||
} | ||
|
||
metaStore(collectionName: string): MetaStore { | ||
return this.scopedStore(`meta:${collectionName}`); | ||
} | ||
|
||
toString() { | ||
return JSON.stringify( | ||
Array.from(this.#collections.entries()).map(([collectionName, collection]) => { | ||
return [collectionName, Array.from(collection.entries())]; | ||
}) | ||
); | ||
} | ||
|
||
async writeToDisk(filePath: PathLike) { | ||
await fs.writeFile(filePath, this.toString()); | ||
} | ||
|
||
static async fromDisk(filePath: PathLike) { | ||
if (!existsSync(filePath)) { | ||
return new DataStore(); | ||
} | ||
const str = await fs.readFile(filePath, 'utf-8'); | ||
return DataStore.fromString(str); | ||
} | ||
|
||
static fromString(str: string) { | ||
const entries = JSON.parse(str); | ||
return DataStore.fromJSON(entries); | ||
} | ||
|
||
static async fromModule() { | ||
try { | ||
// @ts-expect-error | ||
const data = await import('astro:data-layer-content'); | ||
return DataStore.fromJSON(data.default); | ||
} catch {} | ||
return new DataStore(); | ||
} | ||
|
||
static fromJSON(entries: Array<[string, Array<[string, any]>]>) { | ||
const collections = new Map<string, Map<string, any>>(); | ||
for (const [collectionName, collection] of entries) { | ||
collections.set(collectionName, new Map(collection)); | ||
} | ||
const store = new DataStore(); | ||
store.#collections = collections; | ||
return store; | ||
} | ||
} | ||
|
||
export interface ScopedDataStore { | ||
get: (key: string) => any; | ||
entries: () => IterableIterator<[id: string, any]>; | ||
set: (key: string, value: any) => void; | ||
delete: (key: string) => void; | ||
clear: () => void; | ||
has: (key: string) => boolean; | ||
} | ||
|
||
export interface MetaStore { | ||
get: (key: string) => string | undefined; | ||
set: (key: string, value: string) => void; | ||
has: (key: string) => boolean; | ||
} | ||
|
||
function dataStoreSingleton() { | ||
let instance: Promise<DataStore> | DataStore | undefined = undefined; | ||
return { | ||
get: async () => { | ||
if (!instance) { | ||
instance = DataStore.fromModule(); | ||
} | ||
return instance; | ||
}, | ||
set: (store: DataStore) => { | ||
instance = store; | ||
}, | ||
}; | ||
} | ||
|
||
export const globalDataStore = dataStoreSingleton(); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,51 @@ | ||
import { fileURLToPath } from 'url'; | ||
import type { Loader } from './loaders.js'; | ||
import { promises as fs, existsSync } from 'fs'; | ||
|
||
/** | ||
* Loads entries from a JSON file. The file must contain an array of objects that contain unique `id` fields, or an object with string keys. | ||
* @todo Add support for other file types, such as YAML, CSV etc. | ||
* @param fileName The path to the JSON file to load, relative to the content directory. | ||
*/ | ||
export function file(fileName: string): Loader { | ||
if (fileName.includes('*')) { | ||
throw new Error('Glob patterns are not supported in file loader. Use `glob` loader instead.'); | ||
} | ||
return { | ||
name: 'file-loader', | ||
load: async ({ store, logger, settings, parseData }) => { | ||
const contentDir = new URL('./content/', settings.config.srcDir); | ||
|
||
const url = new URL(fileName, contentDir); | ||
if (!existsSync(url)) { | ||
logger.error(`File not found: ${fileName}`); | ||
return; | ||
} | ||
|
||
const data = await fs.readFile(url, 'utf-8'); | ||
const json = JSON.parse(data); | ||
|
||
const filePath = fileURLToPath(url); | ||
|
||
if (Array.isArray(json)) { | ||
if (json.length === 0) { | ||
logger.warn(`No items found in ${fileName}`); | ||
} | ||
for (const rawItem of json) { | ||
const id = rawItem.id ?? rawItem.slug; | ||
const item = await parseData({ id, data: rawItem, filePath }); | ||
store.set(id, item); | ||
} | ||
} else if (typeof json === 'object') { | ||
for (const [id, rawItem] of Object.entries<Record<string, unknown>>(json)) { | ||
const item = await parseData({ id, data: rawItem, filePath }); | ||
store.set(id, item); | ||
} | ||
} else { | ||
logger.error(`Invalid data in ${fileName}. Must be an array or object.`); | ||
} | ||
|
||
logger.info('Loading posts'); | ||
}, | ||
}; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,115 @@ | ||
import type { ZodSchema } from 'zod'; | ||
import type { AstroSettings } from '../@types/astro.js'; | ||
import type { AstroIntegrationLogger, Logger } from '../core/logger/core.js'; | ||
import { DataStore, globalDataStore, type MetaStore, type ScopedDataStore } from './data-store.js'; | ||
import { getEntryData, globalContentConfigObserver } from './utils.js'; | ||
import { promises as fs, existsSync } from 'fs'; | ||
|
||
/** Options passed to `LoaderContext.parseData`. */
export interface ParseDataOptions {
  /** The ID of the entry. Unique per collection */
  id: string;
  /** The raw, unvalidated data of the entry */
  data: Record<string, unknown>;
  /** An optional file path, where the entry represents a local file */
  filePath?: string;
}

/** The context object passed to a loader's `load` function. */
export interface LoaderContext {
  /** The name of the collection being loaded. */
  collection: string;
  /** A database abstraction to store the actual data */
  store: ScopedDataStore;
  /** A simple KV store, designed for things like sync tokens */
  meta: MetaStore;
  /** Logger scoped to the content integration. */
  logger: AstroIntegrationLogger;

  /** The resolved Astro settings, e.g. for reading `config.srcDir`. */
  settings: AstroSettings;

  /** Validates and parses the data according to the schema */
  // NOTE(review): declared synchronous (returns T), but callers such as the
  // file loader `await` the result — confirm whether this should be Promise<T>.
  parseData<T extends Record<string, unknown> = Record<string, unknown>>(
    props: ParseDataOptions
  ): T;
}

/** A content-layer loader: fetches data and writes it into the scoped store. */
export interface Loader {
  /** Unique name of the loader, e.g. the npm package name */
  name: string;
  /** Do the actual loading of the data */
  load: (context: LoaderContext) => Promise<void>;
  /** Optionally, define the schema of the data. Will be overridden by user-defined schema */
  schema?: ZodSchema | Promise<ZodSchema> | (() => ZodSchema | Promise<ZodSchema>);
  /** Optional hook to render an entry — signature not constrained in this file. */
  render?: (entry: any) => any;
}
/**
 * Runs every `experimental_data` collection's loader and persists the
 * resulting store to `data-store.json` in the cache directory.
 *
 * If no `store` is supplied, one is hydrated from the cache file on disk and
 * registered as the global store. The sync is skipped entirely when the
 * content config has not finished loading.
 */
export async function syncDataLayer({
  settings,
  logger: globalLogger,
  store,
}: { settings: AstroSettings; logger: Logger; store?: DataStore }) {
  const logger = globalLogger.forkIntegrationLogger('content');
  if (!store) {
    store = await DataStore.fromDisk(new URL('data-store.json', settings.config.cacheDir));
    globalDataStore.set(store);
  }
  const contentConfig = globalContentConfigObserver.get();
  if (contentConfig?.status !== 'loaded') {
    logger.debug('Content config not loaded, skipping sync');
    return;
  }
  // Run all collection loaders concurrently.
  await Promise.all(
    Object.entries(contentConfig.config.collections).map(async ([name, collection]) => {
      if (collection.type !== 'experimental_data') {
        return;
      }

      let { schema } = collection;

      // A user-defined schema takes precedence; otherwise fall back to the
      // schema (if any) declared by the loader itself.
      if (!schema) {
        schema = collection.loader.schema;
      }

      // A schema may be a factory function; resolve it. The `image` helper is
      // stubbed out because images are not supported for data collections yet.
      if (typeof schema === 'function') {
        schema = await schema({
          image: () => {
            throw new Error('Images are currently not supported for experimental data collections');
          },
        });
      }

      const collectionWithResolvedSchema = { ...collection, schema };

      // Validates and parses an entry against the resolved schema. Passed to
      // the loader so it can normalise raw data before writing it to the store.
      // NOTE(review): the `as unknown as T` cast assumes getEntryData returns
      // the parsed object synchronously — confirm against its definition.
      function parseData<T extends Record<string, unknown> = Record<string, unknown>>({
        id,
        data,
        filePath = '',
      }: { id: string; data: T; filePath?: string }): T {
        return getEntryData(
          {
            id,
            collection: name,
            unvalidatedData: data,
            _internal: {
              rawData: undefined,
              filePath,
            },
          },
          collectionWithResolvedSchema,
          false
        ) as unknown as T;
      }

      // Each loader only sees its own scoped store and meta store.
      return collection.loader.load({
        collection: name,
        store: store.scopedStore(name),
        meta: store.metaStore(name),
        logger,
        settings,
        parseData,
      });
    })
  );
  // Persist the store so the next run can start from the cached data.
  const cacheFile = new URL('data-store.json', settings.config.cacheDir);
  if (!existsSync(settings.config.cacheDir)) {
    await fs.mkdir(settings.config.cacheDir, { recursive: true });
  }
  await store.writeToDisk(cacheFile);
  logger.info('Synced content');
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Making this generic messed up inference for top level schemas