diff --git a/example/src/github_trending.ts b/example/src/github_trending.ts
new file mode 100644
index 0000000..35ad4e0
--- /dev/null
+++ b/example/src/github_trending.ts
@@ -0,0 +1,51 @@
+import { agent } from '@dead-simple-ai-agent/framework/agent'
+import { teamwork } from '@dead-simple-ai-agent/framework/teamwork'
+import { logger } from '@dead-simple-ai-agent/framework/telemetry'
+import { solution, workflow } from '@dead-simple-ai-agent/framework/workflow'
+
+import { createFireCrawlTool } from './tools/firecrawlScrape.js'
+import { getApiKey } from './tools/utils.js'
+
+const apiKey = await getApiKey('Firecrawl.dev API Key', 'FIRECRAWL_API_KEY')
+
+const { firecrawlScrape } = createFireCrawlTool({
+  apiKey,
+})
+
+const githubResearcher = agent({
+  role: 'Github Researcher',
+  description: `
+    You are skilled at browsing what's hot on Github trending page.
+  `,
+  tools: {
+    firecrawlScrape,
+  },
+})
+
+const wrapupRedactor = agent({
+  role: 'Redactor',
+  description: `
+    Your role is to wrap up reports.
+    You're famous of beautiful Markdown formatting.
+  `,
+})
+
+const wrapUpTrending = workflow({
+  members: [githubResearcher, wrapupRedactor],
+  description: `
+    Research the URL "https://github.com/trending/python" page using scraper tool
+    Get 3 top projects. You can get the title and description from the project page.
+    Then summarize it all into a comprehensive report markdown output.
+
+    Here are some ground rules to follow:
+    - Include one sentence summary for each project.
+  `,
+  output: `
+    Comprehensive markdown report with the top trending python projects.
+  `,
+  snapshot: logger,
+})
+
+const result = await teamwork(wrapUpTrending)
+
+console.log(solution(result))
diff --git a/example/src/tools/firecrawlScrape.ts b/example/src/tools/firecrawlScrape.ts
new file mode 100644
index 0000000..0522456
--- /dev/null
+++ b/example/src/tools/firecrawlScrape.ts
@@ -0,0 +1,112 @@
+import { tool } from '@dead-simple-ai-agent/framework/tool'
+import { RequiredOptionals } from '@dead-simple-ai-agent/framework/types'
+import axios from 'axios'
+import s from 'dedent'
+import { z } from 'zod'
+
+/**
+ * Configuration options for FireCrawl API
+ * @see https://docs.firecrawl.dev
+ */
+interface FireCrawlOptions {
+  /**
+   * API Key for authentication with FireCrawl API
+   * Required for all API calls. Get one at https://firecrawl.dev
+   */
+  apiKey: string
+
+  /**
+   * Default output formats for the scrape
+   * Specifies the formats to include in the response (e.g., 'markdown', 'html')
+   * @default ['markdown']
+   */
+  formats?: string[]
+
+  /** Firecrawl API endpoint
+   * @default 'https://api.firecrawl.dev/v1/scrape'
+   */
+  url?: string
+}
+
+const defaults: RequiredOptionals<FireCrawlOptions> = {
+  formats: ['markdown'],
+  url: 'https://api.firecrawl.dev/v1/scrape',
+}
+
+const FireCrawlResponseSchema = z.object({
+  success: z.boolean(),
+  data: z.object({
+    markdown: z.string().optional(),
+    html: z.string().optional(),
+    metadata: z.object({
+      title: z.string().optional(),
+      description: z.string().optional(),
+      language: z.string().optional(),
+      keywords: z.string().optional(),
+      robots: z.string().optional(),
+      ogTitle: z.string().optional(),
+      ogDescription: z.string().optional(),
+      ogUrl: z.string().optional(),
+      ogImage: z.string().optional(),
+      sourceURL: z.string().optional(),
+      statusCode: z.number().optional(),
+    }),
+  }),
+})
+
+/**
+ * Creates a FireCrawl-backed scraping tool.
+ * @param options - API key plus optional default formats and endpoint URL.
+ * @returns An object exposing the `firecrawlScrape` tool.
+ */
+export const createFireCrawlTool = (options: FireCrawlOptions) => {
+  const config = {
+    ...defaults,
+    ...options,
+  }
+
+  // NOTE: carries the bearer token — never log this object.
+  const request = {
+    headers: {
+      Authorization: `Bearer ${config.apiKey}`,
+      'Content-Type': 'application/json',
+    },
+  }
+
+  return {
+    firecrawlScrape: tool({
+      description:
+        'Scrape a website and return its content in specified formats using the FireCrawl API',
+      parameters: z.object({
+        url: z.string().describe('URL of the website to scrape'),
+        formats: z
+          .array(z.string())
+          .optional()
+          .describe('Output formats to include (options: markdown, html)'),
+      }),
+      execute: async ({ url, formats }) => {
+        const body = {
+          url,
+          formats: formats ?? config.formats,
+        }
+
+        try {
+          const response = await axios.post(config.url, body, request)
+
+          const parsedResponse = FireCrawlResponseSchema.parse(response.data)
+
+          if (!parsedResponse.success) {
+            throw new Error('Failed to scrape the website.')
+          }
+
+          const { markdown, html, metadata } = parsedResponse.data
+
+          return s`
+            Scraped content for URL "${url}":
+            ${markdown ? `\nMarkdown:\n${markdown}` : ''}
+            ${html ? `\nHTML:\n${html}` : ''}
+            \nMetadata:\n${JSON.stringify(metadata, null, 2)}
+          `
+        } catch (error) {
+          // `catch` variables are `unknown` under strict TS; narrow before reading `.message`.
+          const message = error instanceof Error ? error.message : String(error)
+          return `Error scraping website: ${message}`
+        }
+      },
+    }),
+  }
+}