feat: fix normalizeUrl; improve firecrawl, perigon, scraper clients
transitive-bullshit committed Jun 18, 2024
1 parent c31d4e0 commit c0a5323
Showing 5 changed files with 52 additions and 40 deletions.
29 changes: 15 additions & 14 deletions src/services/firecrawl-client.ts
@@ -1,11 +1,19 @@
import defaultKy, { type KyInstance } from 'ky'
+ import pThrottle from 'p-throttle'
import z from 'zod'

import { aiFunction, AIFunctionsProvider } from '../fns.js'
- import { assert, delay, getEnv } from '../utils.js'
+ import { assert, delay, getEnv, throttleKy } from '../utils.js'
import { zodToJsonSchema } from '../zod-to-json-schema.js'

export namespace firecrawl {
+ // Allow up to 1 request per second by default.
+ export const throttle = pThrottle({
+ limit: 1,
+ interval: 1000,
+ strict: true
+ })
+
/**
* Generic parameter interface.
*/
@@ -96,11 +104,13 @@ export class FirecrawlClient extends AIFunctionsProvider {
apiKey = getEnv('FIRECRAWL_API_KEY'),
apiBaseUrl = getEnv('FIRECRAWL_API_BASE_URL') ??
'https://api.firecrawl.dev',
+ throttle = true,
timeoutMs = 60_000,
ky = defaultKy
}: {
apiKey?: string
apiBaseUrl?: string
+ throttle?: boolean
timeoutMs?: number
ky?: KyInstance
} = {}) {
@@ -117,7 +127,9 @@ export class FirecrawlClient extends AIFunctionsProvider {
this.apiKey = apiKey
this.apiBaseUrl = apiBaseUrl

- this.ky = ky.extend({
+ const throttledKy = throttle ? throttleKy(ky, firecrawl.throttle) : ky
+
+ this.ky = throttledKy.extend({
prefixUrl: apiBaseUrl,
timeout: timeoutMs,
headers: {
@@ -155,18 +167,7 @@ export class FirecrawlClient extends AIFunctionsProvider {
}
}

- const res = await this.ky
- .post('v0/scrape', { json })
- .json<firecrawl.ScrapeResponse>()
-
- if (!res.success || !res.data) return res
-
- if (res.data.markdown) {
- delete res.data.html
- delete res.data.content
- }
-
- return res
+ return this.ky.post('v0/scrape', { json }).json<firecrawl.ScrapeResponse>()
}

async search(
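The FirecrawlClient changes above route every request through a shared rate limiter: the namespace now exports a `p-throttle` instance capped at 1 request per second, and the constructor wraps `ky` via `throttleKy(ky, firecrawl.throttle)` unless `throttle: false` is passed. The `throttleKy` helper itself lives in `../utils.js` and is not part of this diff, so the sketch below is only one plausible way to combine `p-throttle` with ky's `beforeRequest` hook; the `throttleKySketch` name and hook wiring are assumptions, not the repo's implementation.

```ts
import defaultKy, { type KyInstance } from 'ky'
import pThrottle from 'p-throttle'

// Hypothetical sketch of a throttleKy-style helper (the real one is in
// '../utils.js' and may differ). It delays each outgoing request until the
// p-throttle limiter grants a slot, using ky's beforeRequest hook.
function throttleKySketch(
  ky: KyInstance,
  throttleFn: ReturnType<typeof pThrottle>
): KyInstance {
  // Throttle a no-op; awaiting it in the hook defers the request itself.
  const waitForSlot = throttleFn(() => undefined)

  return ky.extend({
    hooks: {
      beforeRequest: [
        async () => {
          await waitForSlot()
        }
      ]
    }
  })
}

// Mirrors the diff: at most 1 request per second.
const throttle = pThrottle({ limit: 1, interval: 1000, strict: true })
const throttledKy = throttleKySketch(defaultKy, throttle)
```

Since `firecrawl.throttle` is a module-level export, clients constructed with throttling enabled draw from the same one-request-per-second budget.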
48 changes: 24 additions & 24 deletions src/services/perigon-client.ts
@@ -681,20 +681,20 @@ export class PerigonClient extends AIFunctionsProvider {
})
})
async searchArticles(opts: perigon.ArticlesSearchOptions) {
- const searchParams = sanitizeSearchParams({
- sortBy: 'relevance',
- ...opts,
- apiKey: this.apiKey,
- size: Math.max(
- 1,
- Math.min(perigon.MAX_PAGE_SIZE, opts.size || perigon.DEFAULT_PAGE_SIZE)
- )
- })
- console.log('perigon.searchArticles', searchParams)
-
return this.ky
.get('all', {
- searchParams
+ searchParams: sanitizeSearchParams({
+ sortBy: 'relevance',
+ ...opts,
+ apiKey: this.apiKey,
+ size: Math.max(
+ 1,
+ Math.min(
+ perigon.MAX_PAGE_SIZE,
+ opts.size || perigon.DEFAULT_PAGE_SIZE
+ )
+ )
+ })
})
.json<perigon.ArticlesSearchResponse>()
}
@@ -721,20 +721,20 @@ export class PerigonClient extends AIFunctionsProvider {
})
})
async searchStories(opts: perigon.StoriesSearchOptions) {
- const searchParams = sanitizeSearchParams({
- sortBy: 'relevance',
- ...opts,
- apiKey: this.apiKey,
- size: Math.max(
- 1,
- Math.min(perigon.MAX_PAGE_SIZE, opts.size || perigon.DEFAULT_PAGE_SIZE)
- )
- })
- console.log('perigon.searchStories', searchParams)
-
return this.ky
.get('stories/all', {
- searchParams
+ searchParams: sanitizeSearchParams({
+ sortBy: 'relevance',
+ ...opts,
+ apiKey: this.apiKey,
+ size: Math.max(
+ 1,
+ Math.min(
+ perigon.MAX_PAGE_SIZE,
+ opts.size || perigon.DEFAULT_PAGE_SIZE
+ )
+ )
+ })
})
.json<perigon.StoriesSearchResponse>()
}
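Both Perigon search methods now build their query inline inside the `ky.get()` call and drop the debug `console.log`. The `sanitizeSearchParams` helper is not shown in this diff; the sketch below only illustrates the kind of cleanup such a helper typically performs (dropping `undefined` values so ky can serialize the rest) together with the page-size clamp from the diff, using assumed constant values.

```ts
// Hypothetical stand-in for sanitizeSearchParams; the real helper lives
// elsewhere in the repo and may behave differently.
function sanitizeSearchParamsSketch(
  params: Record<string, unknown>
): Record<string, string | number | boolean> {
  return Object.fromEntries(
    Object.entries(params).filter(([, value]) => value !== undefined)
  ) as Record<string, string | number | boolean>
}

// The size clamp from the diff keeps the requested page size within
// [1, MAX_PAGE_SIZE]; the numeric values below are assumptions, standing in
// for perigon.MAX_PAGE_SIZE and perigon.DEFAULT_PAGE_SIZE.
const MAX_PAGE_SIZE = 100
const DEFAULT_PAGE_SIZE = 10

const requested = 250
const size = Math.max(1, Math.min(MAX_PAGE_SIZE, requested || DEFAULT_PAGE_SIZE))
// size === 100; a requested size of 0 or undefined falls back to DEFAULT_PAGE_SIZE
```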
2 changes: 1 addition & 1 deletion src/services/scraper-client.ts
@@ -82,7 +82,7 @@ export class ScraperClient extends AIFunctionsProvider {
| string
| {
url: string
- format?: 'html' | 'markdown' | 'plaintext'
+ format?: 'html' | 'markdown' | 'plaintext' | 'all'
timeoutMs?: number
}
): Promise<Partial<scraper.ScrapeResult>> {
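The scraper client change is a one-line widening of the `format` union to accept `'all'`. A minimal illustration of the options shape after this change; the `ScrapeOptions` alias is local to this example, since the surrounding method name is not visible in the hunk.

```ts
// Local alias for illustration only; it mirrors the parameter type in the diff.
type ScrapeOptions =
  | string
  | {
      url: string
      format?: 'html' | 'markdown' | 'plaintext' | 'all'
      timeoutMs?: number
    }

// 'all' now type-checks as a valid format value.
const opts: ScrapeOptions = { url: 'https://example.com', format: 'all' }
```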
3 changes: 3 additions & 0 deletions src/url-utils.test.ts
@@ -17,6 +17,9 @@ describe('normalizeUrl', () => {
expect(normalizeUrl('https://google.com/abc/123//')).toBe(
'https://google.com/abc/123'
)
+ expect(normalizeUrl('//google.com')).toBe('https://google.com')
+ expect(normalizeUrl('google.com')).toBe('https://google.com')
+ expect(normalizeUrl('abc.foo.com')).toBe('https://abc.foo.com')
})

test('invalid urls', async () => {
10 changes: 9 additions & 1 deletion src/url-utils.ts
@@ -45,10 +45,18 @@ export function normalizeUrl(
): string | undefined {
let normalizedUrl: string | undefined

- if (!url || isRelativeUrl(url)) {
+ if (!url || typeof url !== 'string') {
return undefined
}

+ if (isRelativeUrl(url)) {
+ if (!/^[./]/.test(url) && url.indexOf('.') > 0) {
+ url = `https://${url}`
+ } else {
+ return undefined
+ }
+ }
+
const opts = {
stripWWW: false,
defaultProtocol: 'https',
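Taken together, the `url-utils` changes make `normalizeUrl` reject non-string input first, then upgrade protocol-relative and bare-domain strings to `https://` instead of discarding everything that looks relative. A quick sketch of the resulting behavior, matching the new test cases above (the import path follows the repo's ESM style; the relative-path and dot-less cases fall through to the `return undefined` branch):

```ts
import { normalizeUrl } from './url-utils.js'

// Protocol-relative and bare-domain inputs are now upgraded to https.
normalizeUrl('//google.com')  // => 'https://google.com'
normalizeUrl('google.com')    // => 'https://google.com'
normalizeUrl('abc.foo.com')   // => 'https://abc.foo.com'

// Inputs starting with '.' or '/', or containing no '.', are still treated as
// relative and rejected.
normalizeUrl('./relative/path') // => undefined
normalizeUrl('/absolute/path')  // => undefined
```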
