Scrape list

Turn a list of URLs into clean scraped data.

Provider: Firecrawl
Billing Mode: On Success
Connection: Managed, User
Cost per operation: 0.10 credits
Event: firecrawl-list-url-scrape

Input Fields

urls (json)
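
Each input record supplies the pages to scrape under the urls field, along with an id of your choosing for correlating results. Taken directly from the request bodies below, a single record looks like:

{
  "id": "1",
  "urls": ["https://pipe0.com", "https://pipe0.com/docs"]
}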

Output Fields

scrape_list_response (json)
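
This page does not show the schema of scrape_list_response. As a rough sketch only, assuming one entry per scraped URL carrying the formats requested in the pipe config (markdown and links in the examples below), a result record might look like:

{
  "scrape_list_response": [
    {
      "url": "https://pipe0.com",
      "markdown": "...",
      "links": ["..."]
    }
  ]
}

The field names url, markdown, and links here are illustrative assumptions, not confirmed output keys.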

Code Examples

Every example below issues the same request: a POST to https://api.pipe0.com/v1/pipes/run that runs the website:scrapelist:firecrawl@1 pipe over two input URLs. Replace <TOKEN> with your API key.

JavaScript

const options = {
  method: 'POST',
  headers: {'content-type': 'application/json', authorization: 'Bearer <TOKEN>'},
  body: JSON.stringify({
    config: {environment: 'production'},
    pipes: [
      {
        pipe_id: 'website:scrapelist:firecrawl@1',
        config: {scrape_main_only: true, formats: ['markdown', 'links']}
      }
    ],
    input: [{id: '1', urls: ['https://pipe0.com', 'https://pipe0.com/docs']}]
  })
};

fetch('https://api.pipe0.com/v1/pipes/run', options)
  .then(res => res.json())
  .then(res => console.log(res))
  .catch(err => console.error(err));

Python

import requests

url = "https://api.pipe0.com/v1/pipes/run"

payload = {
    "config": { "environment": "production" },
    "pipes": [
        {
            "pipe_id": "website:scrapelist:firecrawl@1",
            "config": {
                "scrape_main_only": True,
                "formats": ["markdown", "links"]
            }
        }
    ],
    "input": [
        {
            "id": "1",
            "urls": ["https://pipe0.com", "https://pipe0.com/docs"]
        }
    ]
}
headers = {
    "content-type": "application/json",
    "authorization": "Bearer <TOKEN>"
}

response = requests.post(url, json=payload, headers=headers)

print(response.text)

cURL

curl --request POST \
     --url https://api.pipe0.com/v1/pipes/run \
     --header 'authorization: Bearer <TOKEN>' \
     --header 'content-type: application/json' \
     --data '
{
  "config": {
    "environment": "production"
  },
  "pipes": [
    {
      "pipe_id": "website:scrapelist:firecrawl@1",
      "config": {
        "scrape_main_only": true,
        "formats": [
          "markdown",
          "links"
        ]
      }
    }
  ],
  "input": [
    {
      "id": "1",
      "urls": [
        "https://pipe0.com",
        "https://pipe0.com/docs"
      ]
    }
  ]
}
'

Go

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {

	url := "https://api.pipe0.com/v1/pipes/run"

	payload := strings.NewReader("{\"config\":{\"environment\":\"production\"},\"pipes\":[{\"pipe_id\":\"website:scrapelist:firecrawl@1\",\"config\":{\"scrape_main_only\":true,\"formats\":[\"markdown\",\"links\"]}}],\"input\":[{\"id\":\"1\",\"urls\":[\"https://pipe0.com\",\"https://pipe0.com/docs\"]}]}")

	req, _ := http.NewRequest("POST", url, payload)

	req.Header.Add("content-type", "application/json")
	req.Header.Add("authorization", "Bearer <TOKEN>")

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := io.ReadAll(res.Body)

	fmt.Println(string(body))

}

PHP

<?php
require_once('vendor/autoload.php');

$client = new \GuzzleHttp\Client();

$response = $client->request('POST', 'https://api.pipe0.com/v1/pipes/run', [
  'body' => '{"config":{"environment":"production"},"pipes":[{"pipe_id":"website:scrapelist:firecrawl@1","config":{"scrape_main_only":true,"formats":["markdown","links"]}}],"input":[{"id":"1","urls":["https://pipe0.com","https://pipe0.com/docs"]}]}',
  'headers' => [
    'authorization' => 'Bearer <TOKEN>',
    'content-type' => 'application/json',
  ],
]);

echo $response->getBody();

HTTP

POST /v1/pipes/run HTTP/1.1
Content-Type: application/json
Authorization: Bearer <TOKEN>
Host: api.pipe0.com
Content-Length: 235

{"config":{"environment":"production"},"pipes":[{"pipe_id":"website:scrapelist:firecrawl@1","config":{"scrape_main_only":true,"formats":["markdown","links"]}}],"input":[{"id":"1","urls":["https://pipe0.com","https://pipe0.com/docs"]}]}