Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More →
Socket
Sign in · Demo · Install
Socket

opensea-scraper

Package Overview
Dependencies
Maintainers
1
Versions
29
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

opensea-scraper - npm Package Compare versions

Comparing version 3.0.0 to 3.0.1

src/functions/offersByUrl.js

11

demo.js

@@ -28,6 +28,13 @@ const OpenseaScraper = require("./src/index.js");

console.log(`\n\n\n\nāœ… === OpenseaScraper.offers(slug, resultSize) ===`);
const resultSize = 3;
let resultSize = 3;
const offers = await OpenseaScraper.offers(slug, resultSize);
console.log(`scraped ${offers.length} offers: ${offers.map(o => `${o.name} : ${o.floorPrice.amount} ${o.floorPrice.currency}`).join(" | ")}`);
console.log(`scraped ${offers.length} offers: ${offers.map(o => `${o.tokenName} : ${o.floorPrice.amount} ${o.floorPrice.currency}`).join(" | ")}`);
// get offersByUrl
console.log(`\n\n\n\nāœ… === OpenseaScraper.offersByUrl(url, resultSize) ===`);
resultSize = 3;
const url = "https://opensea.io/collection/sandbox?search[sortAscending]=true&search[sortBy]=PRICE&search[stringTraits][0][name]=Type&search[stringTraits][0][values][0]=Land&search[toggles][0]=BUY_NOW";
const offersByUrl = await OpenseaScraper.offersByUrl(url, resultSize);
console.log(`scraped ${offersByUrl.length} offers: ${offersByUrl.map(o => `${o.tokenName} : ${o.floorPrice.amount} ${o.floorPrice.currency}`).join(" | ")}`);
// scrape rankings => https://opensea.io/rankings?sortBy=total_volume

@@ -34,0 +41,0 @@ console.log(`\n\n\n\nāœ… === OpenseaScraper.rankings(nPages) ===`);

2

package.json
{
"name": "opensea-scraper",
"version": "3.0.0",
"version": "3.0.1",
"description": "Scraping accurate floor prices from opensea, because the API returns inaccurate floor prices.",

@@ -5,0 +5,0 @@ "main": "src/index.js",

@@ -19,3 +19,2 @@ # Opensea Scraper

```js

@@ -37,3 +36,5 @@ const OpenseaScraper = require("opensea-scraper");

// traditionally being a lot cheaper
const floorPriceByUrl = await OpenseaScraper.floorPriceByUrl("https://opensea.io/collection/sandbox?search[sortAscending]=true&search[sortBy]=PRICE&search[stringTraits][0][name]=Type&search[stringTraits][0][values][0]=Land&search[toggles][0]=BUY_NOW");
const floorPriceByUrl = await OpenseaScraper.floorPriceByUrl(
"https://opensea.io/collection/sandbox?search[sortAscending]=true&search[sortBy]=PRICE&search[stringTraits][0][name]=Type&search[stringTraits][0][values][0]=Land&search[toggles][0]=BUY_NOW"
);

@@ -45,2 +46,9 @@ // get offers from opensea. Each offer holds not only the floor price but also the tokenId.

// get offers from opensea using a custom link. Each offer holds not only the floor price but also the tokenId.
// the resultSize is the number of offers you want to fetch.
const resultSize = 10;
const url =
"https://opensea.io/collection/sandbox?search[sortAscending]=true&search[sortBy]=PRICE&search[stringTraits][0][name]=Type&search[stringTraits][0][values][0]=Land&search[toggles][0]=BUY_NOW";
const offers = await OpenseaScraper.offersByUrl(url, resultSize);
// scrape all slugs, names and ranks from the top collections from the rankings page sorted by all time volume:

@@ -53,3 +61,5 @@ // => https://opensea.io/rankings?sortBy=total_volume

## Debugging
If you want to debug, you can pass `"debug"` as last argument and puppeteer will not run in headless mode, so the browser will be launched and you can watch the scraper run. Debugging mode is enabled for the following functions:
- floorPrice

@@ -72,3 +82,5 @@ - floorPriceByUrl

## Script to fetch Floor Price from API
**āš  Important Note**: floor prices fetched with this method are not accurate (not in real time).
```js

@@ -82,3 +94,3 @@ const axios = require("axios");

return response.data.collection.stats.floor_price;
} catch(err) {
} catch (err) {
console.log(err);

@@ -99,3 +111,3 @@ return undefined;

# Python Alternative
ā„¹ if you want a solution to scrape floor prices without using puppeteer, take a look at this python solution: https://gist.github.com/dcts/a1b689b88e61fe350a446a5799209c9b

@@ -5,3 +5,3 @@ const axios = require("axios");

* => api.opensea.io/collection/{slug}
* no scraping is involved here
* no puppeteer is involved here
*/

@@ -35,3 +35,2 @@ const basicInfo = async (slug) => {

// HELPER FUNCTIONS FOR ScrapeOpensea.basicInfo()
function _getName(collectionObj) {

@@ -38,0 +37,0 @@ try {

@@ -15,3 +15,2 @@ // puppeteer-extra is a drop-in replacement for puppeteer,

const scrapeFloorPrice = async (slug, mode = "headless") => {
// puppeteer usage as normal
const browser = await puppeteer.launch({

@@ -48,3 +47,3 @@ headless: mode === "debug" ? false : true,

// that's why we need to minimize, i.e. get the lowest value
// IMPORTANT: spread operator is needed for Math.min() to work with arrays
// REMARK: do not remove spread operator, see explanation here: https://dev.to/thebronxsystem/math-min-array-needs-spread-operator-1oe7
const floorPrice = Math.min(...floorPrices);

@@ -51,0 +50,0 @@ return {

@@ -19,3 +19,2 @@ // puppeteer-extra is a drop-in replacement for puppeteer,

const floorPriceByUrl = async (url, mode = "headless") => {
// puppeteer usage as normal
const browser = await puppeteer.launch({

@@ -22,0 +21,0 @@ headless: mode === "debug" ? false : true,

@@ -9,5 +9,14 @@ // puppeteer-extra is a drop-in replacement for puppeteer,

/**
*
* scrapes opensea offers for a given collection.
* Offers hold additional information, not only the floor price,
* example offer object:
* {
* floorPrice: {
* amount: 1.2,
* currency: "ETH"
* },
* name: "cool cat #231",
* tokenId: 234
* }
*/
// const offers = async (slug, resultSize, mode = "headless") => {
const offers = async (slug, resultSize = 10, mode = "headless") => {

@@ -24,4 +33,4 @@ const browser = await puppeteer.launch({

// EXPOSE ALL HELPER FUNCTIONS
await page.addScriptTag({path: "./src/helpers/offersHelperFunctions.js"});
// expose all helper functions
await page.addScriptTag({path: require.resolve("../helpers/offersHelperFunctions.js")});

@@ -28,0 +37,0 @@ // scrape offers until target resultsize reached or bottom of page reached

@@ -12,6 +12,2 @@ // puppeteer-extra is a drop-in replacement for puppeteer,

* (by default only scrape 1 page = 100 collections)
* timeout = how long to wait for page content, in ms.
* (default 3 seconds = 3000 ms). Higher number makes algorithm slower,
* but might help when having a slower connection
* logs = displays status report to console if true
* mode = "headless" or "debug".

@@ -35,7 +31,5 @@ */

// EXPOSE ALL HELPER FUNCTIONS
logs && console.log("...exposing helper functions through script tag")
await page.addScriptTag({path: "./src/helpers/rankingsHelperFunctions.js"});
// SCROLL TO BOTTOM AND FETCH COLLECTIONS
logs && console.log("...scrolling to bottom and fetching collections.");

@@ -62,5 +56,4 @@ let dict = await scrollToBottomAndFetchCollections(page);

/**
* HELPER FUNCTIONS
* Helper Functions for OpenseaScraper.rankings()
*/

@@ -67,0 +60,0 @@ async function clickNextPageButton(page) {

@@ -6,3 +6,5 @@ const basicInfo = require("./functions/basicInfo.js");

const offers = require("./functions/offers.js");
const offersByUrl = require("./functions/offersByUrl.js");
const OpenseaScraper = {

@@ -14,2 +16,3 @@ basicInfo,

offers,
offersByUrl,
};

@@ -16,0 +19,0 @@

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with āš”ļø by Socket Inc