collect-feeds - npm Package Compare versions

Comparing version 0.0.9 to 0.1.0

lib/find-feed.js

@@ -6,4 +6,4 @@ 'use strict';
 const feedFinder = util.promisify(require('feed-finder'));
-const {isArray, filterEmpties, unique} = require('collect-feeds/lib/utils.js');
 const Queue = require('promise-queue');
+const { isArray, filterEmpties, unique } = require('./utils.js');
 const queue = new Queue(6, Infinity);

@@ -15,3 +15,4 @@ // Black List:
 headers: {
-'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:69.0) Gecko/20100101 Firefox/69.0'
+'user-agent':
+'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:69.0) Gecko/20100101 Firefox/69.0'
 },

@@ -37,5 +38,7 @@ timeout: 10000 // 10s
 ];
-knownFeedEndpoints = knownFeedEndpoints.concat(knownFeedEndpoints.map(v => {
-return '/' + v;
-}));
+knownFeedEndpoints = knownFeedEndpoints.concat(
+knownFeedEndpoints.map((v) => {
+return '/' + v;
+})
+);

@@ -47,29 +50,35 @@ function findFeed(url) {
-return Promise.all([queue.add(() => feedFinder(url, {knownFeedEndpoints, gotOptions})), queue.add(() => rssFinder({url, gotOptions}))])
-.then(result => {
-console.debug('\n----');
-console.debug(result[0]);
-console.debug(result[1]);
-const feedUrls1 = result[0];
-const feedUrls2 = result[1].feedUrls.map(v => v.url);
+return Promise.all([
+queue.add(() => feedFinder(url, { knownFeedEndpoints, gotOptions })),
+queue.add(() => rssFinder({ url, gotOptions }))
+]).then((result) => {
+console.debug('\n----');
+console.debug(result[0]);
+console.debug(result[1]);
+const feedUrls1 = result[0];
+const feedUrls2 = result[1].feedUrls.map((v) => v.url);
-if (feedUrls1[0] && feedUrls1[0] === feedUrls2[0]) {
-return feedUrls2[0];
-}
+if (feedUrls1[0] && feedUrls1[0] === feedUrls2[0]) {
+return feedUrls2[0];
+}
-for (let i = 0, len = feedUrls2.length; i < len; i++) {
-const feedUrl = feedUrls2[i];
-if (feedUrls1.includes(feedUrl)) {
-return feedUrl;
-}
+for (let i = 0, { length } = feedUrls2; i < length; i++) {
+const feedUrl = feedUrls2[i];
+if (feedUrls1.includes(feedUrl)) {
+return feedUrl;
+}
 }
-const feedUrls = unique(filterEmpties(feedUrls2.concat(feedUrls1)));
+const feedUrls = unique(filterEmpties(feedUrls2.concat(feedUrls1)));
-if (feedUrls.length > 0) {
-console.info(`Should check the url '${url}'. \n Found different feed URLs: \n - ${feedUrls.join('\n - ')}`);
-}
+if (feedUrls.length > 0) {
+console.info(
+`Should check the url '${url}'. \n Found different feed URLs: \n - ${feedUrls.join(
+'\n - '
+)}`
+);
+}
-return feedUrls2[0] || feedUrls1[0] || null;
-});
+return feedUrls2[0] || feedUrls1[0] || null;
+});
 }

@@ -79,14 +88,16 @@
 urls = isArray(urls) ? urls : [urls];
-return Promise.all(urls.map(url => {
-return findFeed(url)
-.then(feedUrl => ({
-url,
-feedUrl
-}))
-.catch(error => ({
-url,
-feedUrl: null,
-error
-}));
-}));
+return Promise.all(
+urls.map((url) => {
+return findFeed(url)
+.then((feedUrl) => ({
+url,
+feedUrl
+}))
+.catch((error) => ({
+url,
+feedUrl: null,
+error
+}));
+})
+);
 }

@@ -93,0 +104,0 @@
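
For orientation: findFeed() is the function this file exports and the CLI below consumes. It runs feed-finder and rss-finder through the shared queue and resolves to a single feed URL, or null when nothing is found. A minimal usage sketch; the require path and the URL are illustrative, not taken from the package docs:

```js
// Sketch only: resolve a site's feed URL with findFeed().
const { findFeed } = require('collect-feeds/lib/find-feed.js');

findFeed('https://example.com')
  .then((feedUrl) => {
    // feedUrl is a string such as 'https://example.com/feed.xml', or null when not found
    console.log(feedUrl);
  })
  .catch((error) => {
    console.error(error);
  });
```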

@@ -5,3 +5,3 @@ #!/usr/bin/env node
-const {resolve} = require('path');
+const { resolve } = require('path');
 const lockfile = require('proper-lockfile');

@@ -11,4 +11,4 @@ const fs = require('fs-extra');
 const fileAppendQueue = new Queue(1, Infinity);
-const {urlAlias} = require('collect-feeds/lib/utils.js');
-const {findFeed} = require('./find-feed.js');
+const { urlAlias } = require('./utils.js');
+const { findFeed } = require('./find-feed.js');
 const FOUND_TMP_FILE_PATH = resolve(process.cwd(), 'found.yml');

@@ -36,29 +36,31 @@ const FAILED_TMP_FILE_PATH = resolve(process.cwd(), 'failed.yml');
 function readUrls(filepath) {
-return fs.readFile(filepath)
-.then(content => {
-return content.toString().split('\n');
-});
+return fs.readFile(filepath).then((content) => {
+return content.toString().split('\n');
+});
 }
 function fetchFeeds(urls) {
-return Promise.all(urls.map(url => {
-if (!url) {
-return Promise.resolve();
-}
+return Promise.all(
+urls.map((url) => {
+if (!url) {
+return Promise.resolve();
+}
-return findFeed(url)
-.then(feedUrl => {
-if (feedUrl) {
-return appendFound({alias: urlAlias(url), url, feedUrl})
-.then(appendSkipList(url));
-}
+return findFeed(url)
+.then((feedUrl) => {
+if (feedUrl) {
+return appendFound({ alias: urlAlias(url), url, feedUrl }).then(
+appendSkipList(url)
+);
+}
-return appendFailed(url, 'Feed URL not found')
-.then(appendSkipList(url));
-})
-.catch(error => {
-return appendFailed(url, error)
-.then(appendSkipList(url));
-});
-}));
+return appendFailed(url, 'Feed URL not found').then(
+appendSkipList(url)
+);
+})
+.catch((error) => {
+return appendFailed(url, error).then(appendSkipList(url));
+});
+})
+);
 }

@@ -75,3 +77,4 @@
 // With queue, maybe lockfile is not required
-return fileAppendQueue.add(() => lockfile.lock(filepath, {retries: 5}))
+return fileAppendQueue
+.add(() => lockfile.lock(filepath, { retries: 5 }))
 .then(() => {

@@ -92,3 +95,4 @@ console.info(`append to found list. ${feed.url}`);
-return fileAppendQueue.add(() => lockfile.lock(filepath, {retries: 5}))
+return fileAppendQueue
+.add(() => lockfile.lock(filepath, { retries: 5 }))
 .then(() => {

@@ -104,3 +108,4 @@ console.info(`append to failed list. ${url}`);
-return fileAppendQueue.add(() => lockfile.lock(filepath, {retries: 5}))
+return fileAppendQueue
+.add(() => lockfile.lock(filepath, { retries: 5 }))
 .then(() => {
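
The three append helpers patched above share one pattern: every write is serialized through the concurrency-1 fileAppendQueue and additionally takes a lock via proper-lockfile (with retries) before touching the file. A generic sketch of that pattern follows; appendLine and its arguments are hypothetical, not the package's actual helpers:

```js
// Generic sketch of the queue + lockfile append pattern used above.
const Queue = require('promise-queue');
const lockfile = require('proper-lockfile');
const fs = require('fs-extra');

// Concurrency 1 means appends from this process never interleave.
const fileAppendQueue = new Queue(1, Infinity);

// Hypothetical helper: queue the work, lock the file, append, always release.
function appendLine(filepath, line) {
  return fileAppendQueue.add(() =>
    lockfile.lock(filepath, { retries: 5 }).then((release) =>
      fs
        .appendFile(filepath, line + '\n')
        .then(release, (error) => release().then(() => Promise.reject(error)))
    )
  );
}
```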

@@ -123,8 +128,13 @@ console.info(`append to skip list. ${url}`);
-return Promise.all([readUrls(URL_QUEUE_FILE_PATH), readUrls(SKIP_LIST_FILE_PATH)])
-.then(result => {
+return Promise.all([
+readUrls(URL_QUEUE_FILE_PATH),
+readUrls(SKIP_LIST_FILE_PATH)
+])
+.then((result) => {
 const [urls, skips] = result;
-return urls.filter(url => {
-return !skips.includes(url);
-}).slice(0, count);
+return urls
+.filter((url) => {
+return !skips.includes(url);
+})
+.slice(0, count);
 })

@@ -145,11 +155,12 @@ .then(fetchFeeds);
-if (require.main === module) { // Called directly
+if (require.main === module) {
+// Called directly
 const count = process.argv[2];
-main(count)
-.catch(error => {
-exit(error);
-});
-} else { // Required as a module
+main(count).catch((error) => {
+exit(error);
+});
+} else {
+// Required as a module
 module.exports = main;
 }
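
As the require.main branch shows, this entry point doubles as a CLI (count comes from process.argv[2]) and as a module that exports main(count). A rough sketch of programmatic use, assuming lib/index.js (the "main" field in package.json below) is the file shown here; the URL queue, skip list, found.yml, and failed.yml files are resolved against the current working directory per the constants above:

```js
// Sketch only: process up to 10 URLs from the queue file, skipping ones already
// handled, and append results to found.yml / failed.yml in the current directory.
const main = require('collect-feeds');

main(10).catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```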

@@ -6,7 +6,7 @@ 'use strict';
-unique: arr => [...new Set(arr)],
+unique: (array) => [...new Set(array)],
-filterEmpties: arr => arr.filter(v => Boolean(v)),
+filterEmpties: (array) => array.filter((v) => Boolean(v)),
-urlAlias: url => {
+urlAlias: (url) => {
 const regex = /https?:\/\/|www\.|\?.+|#.+|index\.html|\/$/g;

@@ -13,0 +13,0 @@ url = url.replace(regex, '');
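
The urlAlias helper seen above derives a short alias by stripping the protocol, a leading www., any query string or hash fragment, index.html, and a trailing slash in a single replace. A small illustration, assuming the helper returns the stripped string as the visible lines suggest (the require path is illustrative):

```js
// Illustration of the urlAlias regex shown above.
const { urlAlias } = require('collect-feeds/lib/utils.js');

console.log(urlAlias('https://www.example.com/')); // -> 'example.com'
console.log(urlAlias('http://example.com/blog#latest')); // -> 'example.com/blog'
```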

package.json

 {
 "name": "collect-feeds",
-"version": "0.0.9",
+"version": "0.1.0",
 "description": "A CLI for collect feed URLs.",

@@ -11,4 +11,4 @@ "main": "lib/index.js",
 "demo": "collect-feeds",
-"lint": "xo",
-"lint:fix": "xo --fix",
+"lint": "prettier --write . && xo",
+"lint:fix": "prettier --write . && xo --fix",
 "test": "mocha test/index.js",

@@ -20,6 +20,6 @@ "test:cov": "nyc npm run test",
 "feed-finder": "github:dailyrandomphoto/feed-finder#my-master",
-"fs-extra": "^8.1.0",
+"fs-extra": "^9.0.1",
 "promise-queue": "^2.2.5",
 "proper-lockfile": "^4.1.1",
-"rss-finder": "^2.1.2"
+"rss-finder": "^2.1.4"
 },

@@ -29,6 +29,6 @@ "devDependencies": {
 "chai-as-promised": "^7.1.1",
 "collect-feeds": "file:.",
-"mocha": "^6.2.1",
-"nyc": "^14.1.1",
-"xo": "^0.25.3"
+"mocha": "^8.1.1",
+"nyc": "^15.1.0",
+"prettier": "^2.0.5",
+"xo": "^0.33.0"
 },

@@ -48,3 +48,3 @@ "keywords": [
 "engines": {
-"node": ">=8.6.0"
+"node": ">=10"
 },

@@ -63,5 +63,7 @@ "author": "dailyrandomphoto <dailyrandomphoto@gmail.com> (https://www.dailyrandomphoto.com/)",
 "space": 2,
+"prettier": true,
 "rules": {
 "promise/prefer-await-to-then": 0,
-"capitalized-comments": 0
+"capitalized-comments": 0,
+"unicorn/prefer-number-properties": 0
 },

@@ -68,0 +70,0 @@ "overrides": [

README.md

@@ -6,4 +6,3 @@ # collect-feeds
 [![Build Status][travis-image]][travis-url]
-[![dependencies Status][dependencies-image]][dependencies-url]
-[![devDependencies Status][devDependencies-image]][devDependencies-url]
+[![code style: prettier][code-style-prettier-image]][code-style-prettier-url]

@@ -33,12 +32,10 @@ A CLI for collect feed URLs.
 ## License
-Copyright (c) 2019 [dailyrandomphoto][my-url]. Licensed under the [MIT license][license-url].
+Copyright (c) 2020 [dailyrandomphoto][my-url]. Licensed under the [MIT license][license-url].
 [my-url]: https://github.com/dailyrandomphoto
 [npm-url]: https://www.npmjs.com/package/collect-feeds
 [travis-url]: https://travis-ci.org/dailyrandomphoto/collect-feeds
 [coveralls-url]: https://coveralls.io/github/dailyrandomphoto/collect-feeds?branch=master
 [license-url]: LICENSE
-[dependencies-url]: https://david-dm.org/dailyrandomphoto/collect-feeds
-[devDependencies-url]: https://david-dm.org/dailyrandomphoto/collect-feeds?type=dev
+[code-style-prettier-url]: https://github.com/prettier/prettier
 [npm-downloads-image]: https://img.shields.io/npm/dm/collect-feeds

@@ -48,4 +45,2 @@ [npm-version-image]: https://img.shields.io/npm/v/collect-feeds
 [travis-image]: https://img.shields.io/travis/dailyrandomphoto/collect-feeds
 [coveralls-image]: https://img.shields.io/coveralls/github/dailyrandomphoto/collect-feeds
-[dependencies-image]: https://img.shields.io/david/dailyrandomphoto/collect-feeds
-[devDependencies-image]: https://img.shields.io/david/dev/dailyrandomphoto/collect-feeds
+[code-style-prettier-image]: https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square

Sorry, the diff of this file is not supported yet
