Allow custom headers in upload-link endpoint (#3695)
* allow custom headers in upload-link endpoint
* override loader.scrape to allow for passing of headers in langchain puppeteer
* lint
* Rename some variables, move positional args to named args, update documentation to reflect arg changes and function sigs, validate header object before attempting to forward it to the request
* update header validation for custom headers
---------
Co-authored-by: timothycarambat <rambat1010@gmail.com>
commit 610bdd4673 (parent 21ffabfb15)
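The net effect for API consumers: the documented upload-link endpoint accepts an optional `scraperHeaders` object that is forwarded to the scraper. A minimal request sketch follows; the base URL, route path, and API key are assumptions for illustration, not values taken from this change:

// Sketch only: base URL, route path, and the API key are placeholders.
async function uploadLinkWithHeaders() {
  const res = await fetch("http://localhost:3001/api/v1/document/upload-link", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer MY_API_KEY",
    },
    body: JSON.stringify({
      link: "https://anythingllm.com",
      addToWorkspaces: "workspace1,workspace2",
      scraperHeaders: {
        Authorization: "Bearer token123",
        "My-Custom-Header": "value",
      },
    }),
  });
  return res.json();
}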
@@ -62,9 +62,13 @@ app.post(
   "/process-link",
   [verifyPayloadIntegrity],
   async function (request, response) {
-    const { link } = reqBody(request);
+    const { link, scraperHeaders = {} } = reqBody(request);
     try {
-      const { success, reason, documents = [] } = await processLink(link);
+      const {
+        success,
+        reason,
+        documents = [],
+      } = await processLink(link, scraperHeaders);
       response.status(200).json({ url: link, success, reason, documents });
     } catch (e) {
       console.error(e);
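For context, the collector's "/process-link" route now receives the custom headers in its JSON body. A sketch of that payload; the link and header values are placeholders, and `options` is whatever the server's CollectorApi attaches, as shown in the last hunk below:

// Sketch of a /process-link request body; values are placeholders.
const payload = {
  link: "https://example.com/private-page",
  scraperHeaders: {
    Authorization: "Bearer token123",
  },
  options: {}, // populated server-side by CollectorApi's #attachOptions()
};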
@@ -8,18 +8,25 @@ const { default: slugify } = require("slugify");

 /**
  * Scrape a generic URL and return the content in the specified format
- * @param {string} link - The URL to scrape
- * @param {('html' | 'text')} captureAs - The format to capture the page content as
- * @param {boolean} processAsDocument - Whether to process the content as a document or return the content directly
+ * @param {Object} config - The configuration object
+ * @param {string} config.link - The URL to scrape
+ * @param {('html' | 'text')} config.captureAs - The format to capture the page content as. Default is 'text'
+ * @param {boolean} config.processAsDocument - Whether to process the content as a document or return the content directly. Default is true
+ * @param {{[key: string]: string}} config.scraperHeaders - Custom headers to use when making the request
  * @returns {Promise<Object>} - The content of the page
  */
-async function scrapeGenericUrl(
+async function scrapeGenericUrl({
   link,
   captureAs = "text",
-  processAsDocument = true
-) {
+  processAsDocument = true,
+  scraperHeaders = {},
+}) {
   console.log(`-- Working URL ${link} => (${captureAs}) --`);
-  const content = await getPageContent(link, captureAs);
+  const content = await getPageContent({
+    link,
+    captureAs,
+    headers: scraperHeaders,
+  });

   if (!content.length) {
     console.error(`Resulting URL content was empty at ${link}.`);
@@ -63,13 +70,38 @@ async function scrapeGenericUrl(
   return { success: true, reason: null, documents: [document] };
 }

+/**
+ * Validate the headers object
+ * - Keys & Values must be strings and not empty
+ * - Assemble a new object with only the valid keys and values
+ * @param {{[key: string]: string}} headers - The headers object to validate
+ * @returns {{[key: string]: string}} - The validated headers object
+ */
+function validatedHeaders(headers = {}) {
+  try {
+    if (Object.keys(headers).length === 0) return {};
+    let validHeaders = {};
+    for (const key of Object.keys(headers)) {
+      if (!key?.trim()) continue;
+      if (typeof headers[key] !== "string" || !headers[key]?.trim()) continue;
+      validHeaders[key] = headers[key].trim();
+    }
+    return validHeaders;
+  } catch (error) {
+    console.error("Error validating headers", error);
+    return {};
+  }
+}
+
 /**
  * Get the content of a page
- * @param {string} link - The URL to get the content of
- * @param {('html' | 'text')} captureAs - The format to capture the page content as
+ * @param {Object} config - The configuration object
+ * @param {string} config.link - The URL to get the content of
+ * @param {('html' | 'text')} config.captureAs - The format to capture the page content as. Default is 'text'
+ * @param {{[key: string]: string}} config.headers - Custom headers to use when making the request
  * @returns {Promise<string>} - The content of the page
  */
-async function getPageContent(link, captureAs = "text") {
+async function getPageContent({ link, captureAs = "text", headers = {} }) {
   try {
     let pageContents = [];
     const loader = new PuppeteerWebBaseLoader(link, {
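To make the validation rules above concrete, here is a small sketch of how `validatedHeaders` treats a mixed input; the sample entries are invented for illustration:

// Invented sample input: only non-empty string keys and values survive.
const input = {
  Authorization: "Bearer token123 ", // kept; the value is trimmed
  "X-Empty": "",                     // dropped: empty value
  "X-Count": 42,                     // dropped: value is not a string
  "   ": "ignored",                  // dropped: blank key
};
// validatedHeaders(input) returns { Authorization: "Bearer token123" }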
@@ -91,12 +123,37 @@ async function getPageContent(link, captureAs = "text") {
       },
     });

-    const docs = await loader.load();
-
-    for (const doc of docs) {
-      pageContents.push(doc.pageContent);
-    }
-
+    // Override scrape method if headers are available
+    let overrideHeaders = validatedHeaders(headers);
+    if (Object.keys(overrideHeaders).length > 0) {
+      loader.scrape = async function () {
+        const { launch } = await PuppeteerWebBaseLoader.imports();
+        const browser = await launch({
+          headless: "new",
+          defaultViewport: null,
+          ignoreDefaultArgs: ["--disable-extensions"],
+          ...this.options?.launchOptions,
+        });
+        const page = await browser.newPage();
+        await page.setExtraHTTPHeaders(overrideHeaders);
+
+        await page.goto(this.webPath, {
+          timeout: 180000,
+          waitUntil: "networkidle2",
+          ...this.options?.gotoOptions,
+        });
+
+        const bodyHTML = this.options?.evaluate
+          ? await this.options.evaluate(page, browser)
+          : await page.evaluate(() => document.body.innerHTML);
+
+        await browser.close();
+        return bodyHTML;
+      };
+    }
+
+    const docs = await loader.load();
+    for (const doc of docs) pageContents.push(doc.pageContent);
     return pageContents.join(" ");
   } catch (error) {
     console.error(
@@ -112,6 +169,7 @@ async function getPageContent(link, captureAs = "text") {
           "Content-Type": "text/plain",
           "User-Agent":
             "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36,gzip(gfe)",
+          ...validatedHeaders(headers),
         },
       }).then((res) => res.text());
       return pageText;
@@ -1,20 +1,37 @@
 const { validURL } = require("../utils/url");
 const { scrapeGenericUrl } = require("./convert/generic");

-async function processLink(link) {
+/**
+ * Process a link and return the text content. This util will save the link as a document
+ * so it can be used for embedding later.
+ * @param {string} link - The link to process
+ * @param {{[key: string]: string}} scraperHeaders - Custom headers to apply when scraping the link
+ * @returns {Promise<{success: boolean, content: string}>} - Response from collector
+ */
+async function processLink(link, scraperHeaders = {}) {
   if (!validURL(link)) return { success: false, reason: "Not a valid URL." };
-  return await scrapeGenericUrl(link);
+  return await scrapeGenericUrl({
+    link,
+    captureAs: "text",
+    processAsDocument: true,
+    scraperHeaders,
+  });
 }

 /**
- * Get the text content of a link
+ * Get the text content of a link - does not save the link as a document
+ * Mostly used in agentic flows/tools calls to get the text content of a link
  * @param {string} link - The link to get the text content of
  * @param {('html' | 'text' | 'json')} captureAs - The format to capture the page content as
  * @returns {Promise<{success: boolean, content: string}>} - Response from collector
  */
 async function getLinkText(link, captureAs = "text") {
   if (!validURL(link)) return { success: false, reason: "Not a valid URL." };
-  return await scrapeGenericUrl(link, captureAs, false);
+  return await scrapeGenericUrl({
+    link,
+    captureAs,
+    processAsDocument: false,
+  });
 }

 module.exports = {
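A short usage sketch for the two helpers above, assuming both are exported from this module; the require path, URL, and header values are placeholders:

// Sketch: require path, URL, and header values are placeholders.
const { processLink, getLinkText } = require("./processLink");

async function example() {
  // Scrapes the page, saves it as a document, and forwards the custom headers.
  const saved = await processLink("https://example.com/private-page", {
    Authorization: "Bearer token123",
  });

  // Returns the text content only; nothing is saved as a document.
  const text = await getLinkText("https://example.com/private-page", "text");
  return { saved, text };
}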
@@ -322,7 +322,11 @@ function apiDocumentEndpoints(app) {
             type: 'object',
             example: {
               "link": "https://anythingllm.com",
-              "addToWorkspaces": "workspace1,workspace2"
+              "addToWorkspaces": "workspace1,workspace2",
+              "scraperHeaders": {
+                "Authorization": "Bearer token123",
+                "My-Custom-Header": "value"
+              }
             }
           }
         }
@@ -365,7 +369,11 @@ function apiDocumentEndpoints(app) {
       */
       try {
         const Collector = new CollectorApi();
-        const { link, addToWorkspaces = "" } = reqBody(request);
+        const {
+          link,
+          addToWorkspaces = "",
+          scraperHeaders = {},
+        } = reqBody(request);
         const processingOnline = await Collector.online();

         if (!processingOnline) {
@@ -379,8 +387,10 @@ function apiDocumentEndpoints(app) {
           return;
         }

-        const { success, reason, documents } =
-          await Collector.processLink(link);
+        const { success, reason, documents } = await Collector.processLink(
+          link,
+          scraperHeaders
+        );
         if (!success) {
           response
             .status(500)
@@ -1092,7 +1092,11 @@
             "type": "object",
             "example": {
               "link": "https://anythingllm.com",
-              "addToWorkspaces": "workspace1,workspace2"
+              "addToWorkspaces": "workspace1,workspace2",
+              "scraperHeaders": {
+                "Authorization": "Bearer token123",
+                "My-Custom-Header": "value"
+              }
             }
           }
         }
@@ -101,12 +101,18 @@ class CollectorApi {
    * Process a link
    * - Will append the options to the request body
    * @param {string} link - The link to process
+   * @param {{[key: string]: string}} scraperHeaders - Custom headers to apply to the web-scraping request URL
    * @returns {Promise<Object>} - The response from the collector API
    */
-  async processLink(link = "") {
+  async processLink(link = "", scraperHeaders = {}) {
     if (!link) return false;

-    const data = JSON.stringify({ link, options: this.#attachOptions() });
+    const data = JSON.stringify({
+      link,
+      scraperHeaders,
+      options: this.#attachOptions(),
+    });

     return await fetch(`${this.endpoint}/process-link`, {
       method: "POST",
       headers: {
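On the server side the new argument flows through `CollectorApi.processLink`, matching the endpoint change earlier in this diff. A minimal sketch of that call; the require path, link, and header values are placeholders:

// Sketch: the require path, link, and header values are placeholders.
const { CollectorApi } = require("./utils/collectorApi");

async function forwardWithHeaders() {
  const collector = new CollectorApi();
  const { success, reason, documents } = await collector.processLink(
    "https://example.com/protected",
    { Authorization: "Bearer token123" }
  );
  if (!success) throw new Error(reason);
  return documents;
}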