
twitter-buddy

Package Overview
Dependencies: 116 · Maintainers: 1 · Versions: 5

Comparing version 1.0.2 to 1.0.3

lib/constant.js

bin/cli.js

@@ -8,13 +8,22 @@ #!/usr/bin/env node

try{
argv = {
...argv,
cli: true,
};
argv.cli = true;
argv.username = argv['login-username'];
argv.password = argv['login-password']
await twitterBot(argv)._input({
let result = await twitterBot(argv)._input({
type: 'scrape',
action: argv._[0],
input: argv.input,
input: argv.id,
number: argv.number,
})
});
if (result.json){
console.log(`JSON file saved: ${process.cwd()}/${result.json}`);
}
if (result.csv){
console.log(`CSV file saved: ${process.cwd()}/${result.csv}`);
}
if (result.message){
console.log(result.message);
}
} catch(error){

@@ -27,7 +36,7 @@ console.log(error);

.usage('Usage: $0 <command> [options]')
.example(`twitter-buddy followers -u TWITTER_USERNAME -p TWITTER_PASSWORD -input TWITTER_USERNAME -n 100`)
.example(`twitter-buddy retweetedBy -u TWITTER_USERNAME -p TWITTER_PASSWORD -input TWEET_ID -n 100`)
.example(`twitter-buddy followers TWITTER_USERNAME -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 100`)
.example(`twitter-buddy retweetedBy TWEET_ID -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 100`)
.command(
"retweetedBy",
"scrape retweetedBy data from a tweet. Maximum you can scrape 97 inputs",
"retweetedBy [id]",
"users who retweeted specific tweet",
{},

@@ -39,4 +48,4 @@ (argv) => {

.command(
"likedBy",
"scrape likedBy data from a tweet. Maximum you can scrape 100 inputs",
"likedBy [id]",
"users who liked specific tweet",
{},

@@ -48,4 +57,4 @@ (argv) => {

.command(
"conversation",
"scrape comments data from a tweet. Maximum you can scrape Unlimited inputs",
"conversation [id]",
"tweet comments",
{},

@@ -57,4 +66,4 @@ (argv) => {

.command(
"followers",
"scrape followers data from a twitter username. Maximum you can scrape Unlimited inputs",
"followers [id]",
"user followers",
{},

@@ -66,4 +75,4 @@ (argv) => {

.command(
"tweets",
"scrape tweets data from a twitter username. Maximum you can scrape Unlimited inputs",
"tweets [id]",
"user tweets",
{},

@@ -75,4 +84,4 @@ (argv) => {

.command(
"favorites",
"scrape favorites data from a twitter username. Maximum you can scrape Unlimited inputs",
"favorites [id]",
"user favorites/likes",
{},

@@ -88,20 +97,14 @@ (argv) => {

},
'username': {
'login-username': {
alias: 'u',
default: '',
type: 'string',
describe: "Your twitter username. In order to use this scraper you need to sign in by using your twitter account. This needs to be done only once"
describe: "Your twitter username"
},
'password': {
'login-password': {
alias: 'p',
default: '',
type: 'string',
describe: "Your twitter password. In order to use this scraper you need to sign in by using your twitter account. This needs to be done only once"
describe: "Your twitter password"
},
'input': {
alias: 'i',
default: '',
type: 'string',
describe: "Scrape from. For example if you need to scrape followers from twitter profile 'realDonaldTrump' then enter 'realDonaldTrump'. If you need retweetedBy then enter tweet id '21312312321312313'"
},
'number':{

@@ -123,4 +126,9 @@ alias: 'n',

},
'timeout':{
default: 0,
type: 'integer',
describe: 'If the "rate limit" error received or empty result then you can try to set {timeout}. Timeout is in milliseconds: 1000 = 1 second'
},
})
.demandCommand()
.argv

@@ -13,10 +13,10 @@ 'use strict'

const os = require('os');
const events = require("events");
const EventEmitter = require("events");
const Json2csvParser = require("json2csv").Parser;
const { Readable } = require('stream');
const CONST = require('./constant');
const followersParser = new Json2csvParser();
class TwitterBot extends Readable{
constructor({ username, password, proxy, number, cli, session_path, csv, json, stream}){
class TwitterBot extends EventEmitter{
constructor({ username, password, proxy, number, cli, session_path, csv, json, stream, id, event, timeout }){
super()

@@ -35,4 +35,6 @@ this._steam = stream || false;

this._collector = {};
this._json2csvParser = {};
this._cursor = '';
this._session = false;
this._event = false || event;
this._session_file_path = session_path || os.tmpdir();

@@ -45,6 +47,5 @@ this._graphqlEndPoints = {

this._json = json || false;
this._timeout = 0 || timeout;
this._spinner = cli ? ora('Twitter Scraper Started').start() : '';
this._events = new events.EventEmitter();
}
_read() {}

@@ -94,3 +95,9 @@ get _csrftoken(){

}
resolve(response);
if (this._timeout){
setTimeout(() => {
resolve(response);
},this._timeout)
}else{
resolve(response);
}
} catch(error){

@@ -296,3 +303,8 @@ if (error.name === 'StatusCodeError'){

}
_setCsvParser(action, init_seacrch){
if (!this._event && this._csv && CONST[`csv_${action}`]){
return this._json2csvParser[`${action}_${init_seacrch}`] = new Json2csvParser({ fields: CONST[`csv_${action}`] });
}
}
_input({ type, action, input, number }){

@@ -304,3 +316,7 @@ return new Promise( async(resolve, reject) => {

} catch(error){
return reject(error);
if (this._event){
return this.emit('error', error);
}else{
return reject(error);
}
}

@@ -313,3 +329,7 @@ }

}
return reject({errors:[{ message: `In order to use this scraper you need to authorize your account`}]});
if (this._event){
return this.emit('error', {errors:[{ message: `In order to use this scraper you need to authorize your account`}]});
}else{
return reject({errors:[{ message: `In order to use this scraper you need to authorize your account`}]});
}
}

@@ -320,2 +340,3 @@ let init_seacrch = Date.now();

this._number = number;
this._setCsvParser(action, init_seacrch);
switch(type){

@@ -353,17 +374,34 @@ case 'scrape':

default:
return reject({errors:[{ message: `Unknown action`}]});
if (this._event){
return this.emit('error', {errors:[{ message: `Unknown action`}]});
}else{
return reject({errors:[{ message: `Unknown action`}]});
}
}
// Removing collector storage and cleaning cursor
if (this._csv){
Bluebird.fromCallback(cb => fs.writeFile(`${action}:${init_seacrch}:${input}.csv`, followersParser.parse(result), cb))
}
if (this._json){
Bluebird.fromCallback(cb => fs.writeFile(`${action}:${init_seacrch}:${input}.json`, JSON.stringify(result), cb))
}
resolve(result)
if (this._cli){
this._spinner.stop();
}
this._cursor = '';
delete this._collector[`${action}:${init_seacrch}:${input}`];
if (!this._event){
if (result.length>=1){
if (this._csv){
this._csv_path = `${action}_${init_seacrch}_${input}.csv`;
await Bluebird.fromCallback(cb => fs.writeFile(`${this._csv_path}`, this._json2csvParser[`${action}_${init_seacrch}`].parse(result), cb))
}
if (this._json){
this._json_path = `${action}_${init_seacrch}_${input}.json`;
await Bluebird.fromCallback(cb => fs.writeFile(`${this._json_path}`, JSON.stringify(result), cb))
}
}
if (this._cli){
this._spinner.stop();
}
return resolve({
collector: result,
...(this._json_path ? { json: this._json_path } : { }),
...(this._csv_path ? { csv: this._csv_path } : { }),
...(result.length === 0 ? { message: `Nothing was scraped. Probably rate limit was reached, you can wait a little and try again and set higher timeout for example 3000`} : {})
})
}
this.emit('done')
} catch(error){

@@ -373,7 +411,15 @@ if (this._cli){

}
return reject(error)
if (this._event){
this.emit('error', error);
}else{
reject(error)
}
}
break;
default:
return reject({errors:[{ message: `Wrong action`}]});
if (this._event){
this.emit('error', {errors:[{ message: `Wrong action`}]});
}else{
reject({errors:[{ message: `Wrong action`}]});
}
}

@@ -448,7 +494,18 @@ })

}
collector.push(body.globalObjects.users[key])
if (this._event){
this.emit('data', body.globalObjects.users[key])
collector.push('');
}else{
collector.push(body.globalObjects.users[key]);
}
}
resolve(collector);
if (!this._event){
resolve(collector);
}
} catch(error){
reject(error)
if (this._event){
this.emit('error', error)
}else{
reject(error)
}
}

@@ -466,7 +523,18 @@ })

}
collector.push(body.globalObjects.users[key])
if (this._event){
this.emit('data', body.globalObjects.users[key])
collector.push('');
}else{
collector.push(body.globalObjects.users[key]);
}
}
resolve(collector);
if (!this._event){
resolve(collector);
}
} catch(error){
reject(error)
if (this._event){
this.emit('error', error)
}else{
reject(error)
}
}

@@ -490,16 +558,30 @@ })

body.globalObjects.tweets[key].created_at_mls = new Date(body.globalObjects.tweets[key].created_at).getTime();
collector.push(body.globalObjects.tweets[key])
body.globalObjects.tweets[key].source = body.globalObjects.tweets[key].source.split('>')[1].split('<')[0];
if (this._event){
this.emit('data', body.globalObjects.tweets[key])
collector.push('');
}else{
collector.push(body.globalObjects.tweets[key]);
}
}
this._cursor = body.timeline.instructions[0].addEntries.entries[len-1].content.operation.cursor.value;
} catch(error){
if (error instanceof TypeError){
error = `Nothing to scrape`
}
if (this._event){
this.emit('error', error);
}
break;
}
}
if (this._cli){
this._spinner.text = 'Sorting conversation data by date';
if (!this._event){
if (this._cli){
this._spinner.text = 'Sorting conversation data by date';
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
})

@@ -535,16 +617,30 @@ }

body.globalObjects.tweets[key].created_at_mls = new Date(body.globalObjects.tweets[key].created_at).getTime();
collector.push(body.globalObjects.tweets[key])
body.globalObjects.tweets[key].source = body.globalObjects.tweets[key].source.split('>')[1].split('<')[0];
if (this._event){
this.emit('data', body.globalObjects.tweets[key])
collector.push('');
}else{
collector.push(body.globalObjects.tweets[key]);
}
}
this._cursor = body.timeline.instructions[0].addEntries.entries[len-1].content.operation.cursor.value;
} catch(error){
if (error instanceof TypeError){
error = `Nothing to scrape`
}
if (this._event){
this.emit('error', error);
}
break;
}
}
if (this._cli){
this._spinner.text = 'Sorting user tweets by date';
if (!this.event){
if (this._cli){
this._spinner.text = 'Sorting user tweets by date';
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
})

@@ -580,16 +676,30 @@ }

body.globalObjects.tweets[key].created_at_mls = new Date(body.globalObjects.tweets[key].created_at).getTime();
collector.push(body.globalObjects.tweets[key])
body.globalObjects.tweets[key].source = body.globalObjects.tweets[key].source.split('>')[1].split('<')[0];
if (this._event){
this.emit('data', body.globalObjects.tweets[key])
collector.push('');
}else{
collector.push(body.globalObjects.tweets[key]);
}
}
this._cursor = body.timeline.instructions[0].addEntries.entries[len-1].content.operation.cursor.value;
} catch(error){
if (error instanceof TypeError){
error = `Nothing to scrape`
}
if (this._event){
this.emit('error', error);
}
break;
}
}
if (this._cli){
this._spinner.text = 'Sorting user favorites by date';
if (!this._event){
if (this._cli){
this._spinner.text = 'Sorting user favorites by date';
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
})

@@ -653,3 +763,3 @@ }

if (this._steam){
this.push( null );
//this.push( null );
}

@@ -659,5 +769,7 @@ break collectItems;

body.users[i].created_at_mls = new Date(body.users[i].created_at).getTime();
collector.push(body.users[i]);
if (this._steam){
this.push(body.users[i].screen_name)
if (this._event){
this.emit('data', body.users[i])
collector.push('');
}else{
collector.push(body.users[i]);
}

@@ -667,15 +779,20 @@ }

} catch(error){
if (this._steam){
this.push( null );
if (error instanceof TypeError){
error = `Nothing to scrape`
}
if (this._event){
this.emit('error', error);
}
break;
}
}
if (this._cli){
this._spinner.text = 'Sorting user followers by following date';
if (!this._event){
if (this._cli){
this._spinner.text = 'Sorting user followers by following date';
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
}
collector.sort((a,b) => {
return b.created_at_mls - a.created_at_mls;
});
resolve(collector);
})

@@ -720,2 +837,3 @@ }

let response = await this._graphql(username, `${this._graphqlEndPoints['UserByScreenName']}/UserByScreenName`);
if (!response.body.data.user){

@@ -722,0 +840,0 @@ return reject({errors:[{ message: 'Not Found'}]})

@@ -16,3 +16,3 @@ 'use strict';

module.exports = vending;

package.json
{
"name": "twitter-buddy",
"version": "1.0.2",
"version": "1.0.3",
"description": "Twitter Scraper (Web Based API). You can scrape followers, tweets, favorites from the user profile and retweeted by, liked by and comments from a tweet.",

@@ -5,0 +5,0 @@ "main": "lib/index.js",

README.md

@@ -12,2 +12,3 @@

## Features
* You can use it as a module or in the terminal
* Scrape **followers, tweets, favorites** from a twitter username (public profile only)

@@ -18,5 +19,10 @@ * Scrape **retweeted by, liked by, comments** from a tweet id

**Followers List**
![alt text]()
**Retweeted By Liest**
![alt text]()
![twitter followers list](https://i.imgur.com/HveUgsO.png)
**Tweets List**
![twitter tweets list](https://i.imgur.com/YTlfZVt.png)
**Comments List**
![twitter comments list](https://i.imgur.com/XOozpMl.png)
**Liked By List**
![twitter comments list](https://i.imgur.com/zFPeFgI.png)
and more

@@ -29,3 +35,3 @@ **Limits:**

**Possible errors**
* If there will be let me know
* You can receive a "rate limit" error; in that case wait and increase the timeout (see the example below)
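
For example (the values are illustrative only), the same followers scrape can be retried with a higher `--timeout`, the delay in milliseconds documented in the options further down:

```sh
# Retry with a 3 second timeout if the previous run hit the rate limit or returned nothing
$ twitter-buddy followers realDonaldTrump -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 2000 --timeout 3000
```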

@@ -46,3 +52,3 @@ ## Installation

**Terminal**
### In Terminal

@@ -55,58 +61,52 @@ ```sh

Commands:
twitter-buddy retweetedBy scrape retweetedBy data from a tweet. Maximum you can scrape 97 inputs
twitter-buddy likedBy scrape likedBy data from a tweet. Maximum you can scrape 100 inputs
twitter-buddy conversation scrape comments data from a tweet. Maximum you can scrape Unlimited inputs
twitter-buddy followers scrape followers data from a twitter username. Maximum you can scrape Unlimited inputs
twitter-buddy tweets scrape tweets data from a twitter username. Maximum you can scrape Unlimited inputs
twitter-buddy favorites scrape favorites data from a twitter username. Maximum you can scrape Unlimited inputs
twitter-buddy retweetedBy [id] users who retweeted specific tweet
twitter-buddy likedBy [id] users who liked specific tweet
twitter-buddy conversation [id] tweet comments
twitter-buddy followers [id] user followers
twitter-buddy tweets [id] user tweets
twitter-buddy favorites [id] user favorites/likes
Options:
--help, -h help [boolean]
--version Show version number [boolean]
--username, -u Your twitter username. In order to use this scraper you need
to sign in by using your twitter account. This needs to be
done only once [string] [default: ""]
--password, -p Your twitter password. In order to use this scraper you need
to sign in by using your twitter account. This needs to be
done only once [string] [default: ""]
--input, -i Scrape from. For example if you need to scrape followers from
'twitter' then enter 'twitter'. If you need retweetedBy then
enter tweet id '21312312321312313' [string] [default: ""]
--number, -n Number of inputs to scrape [default: 20]
--csv Save to a CSV file? [boolean] [default: true]
--json Save to a JSON file? [boolean] [default: false]
--help, -h help [boolean]
--version Show version number [boolean]
--login-username, -u Your twitter username [string] [default: ""]
--login-password, -p Your twitter password [string] [default: ""]
--number, -n Number of inputs to scrape [default: 20]
--csv Save to a CSV file? [boolean] [default: true]
--json Save to a JSON file? [boolean] [default: false]
--timeout If the "rate limit" error received or empty result then
you can try to set {timeout}. Timeout is in
milliseconds: 1000 = 1 second [default: 0]
Examples:
twitter-buddy followers -u TWITTER_USERNAME -p TWITTER_PASSWORD -input TWITTER_USERNAME -n 100
twitter-buddy retweetedBy -u TWITTER_USERNAME -p TWITTER_PASSWORD -input TWEET_ID -n 100
twitter-buddy followers TWITTER_USERNAME -u TWITTER_USERNAME -p
TWITTER_PASSWORD -n 100
twitter-buddy retweetedBy TWEET_ID -u TWITTER_USERNAME -p TWITTER_PASSWORD
-n 100
```
**Example 1**
Scrape 1000 followers from twitter profile 'realDonaldTrump' and save everything to a CSV file
Scrape 2000 followers from twitter profile 'realDonaldTrump' and save everything to a CSV file
```sh
$ twitter-buddy followers -u TWITTER_USERNAME -p TWITTER_PASSWORD -input realDonaldTrump -n 1000
$ twitter-buddy followers realDonaldTrump -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 2000
```
**The file will be saved in a folder from which you run the script:
folowers:1552945544582:realDonaldTrump.csv**
**CSV file saved: /{FOLDER_PATH}/followers_1571390379072_realDonaldTrump.csv**
**Example 2**
Scrape 1000 tweets from twitter profile 'realDonaldTrump'.
```sh
$ twitter-buddy tweets -u TWITTER_USERNAME -p TWITTER_PASSWORD -input realDonaldTrump -n 1000
$ twitter-buddy tweets realDonaldTrump -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 1000
```
**The file will be saved in a folder from which you run the script:
tweets:1552945544582:realDonaldTrump.csv**
**CSV file saved: /{FOLDER_PATH}/tweets_1571390379072_realDonaldTrump.csv**
**Example 3**
Scrape 100 users who retweeted this tweet 'https://twitter.com/realDonaldTrump/status/1183796209942695938'.
```sh
$ twitter-buddy retweetedBy -u TWITTER_USERNAME -p TWITTER_PASSWORD -input 1183796209942695938 -n 100
$ twitter-buddy retweetedBy 1183796209942695938 -u TWITTER_USERNAME -p TWITTER_PASSWORD -n 100
```
**The file will be saved in a folder from which you run the script:
retweetedBy:1552945544582:1183796209942695938.csv**
**CSV file saved: /{FOLDER_PATH}/retweetedBy_1571390379072_1183796209942695938.csv**
**Module**
## Module
### Promise
```

@@ -177,2 +177,32 @@ const twitterScraper = require('twitter-buddy');

### Event
```
const twitterScraper = require('twitter-buddy');
let bot = new twitterScraper({
username:'YOUR_TWITTER_USERNAME',
password:'YOUR_TWITTER_PASSWORD',
event: true // Enable event emitter
//session_path: 'CUSTOM_SESSION_PATH',
//proxy: 'HTTPS PROXY'
});
bot._input({
type: 'scrape',
action: 'followers',
input: 'realDonaldTrump',
number: 1000,
});
bot.on('data', data => {
// data
})
bot.on('error', error => {
// error
})
```
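
The Promise usage section is truncated in this comparison; based on the `_input()` resolve call and the CLI handling shown in the diff above, a minimal sketch of Promise-mode usage looks like the following (field names are assumed from that code):
```
const twitterScraper = require('twitter-buddy');
let bot = new twitterScraper({
    username: 'YOUR_TWITTER_USERNAME',
    password: 'YOUR_TWITTER_PASSWORD',
    csv: true,        // save results to a CSV file in the current working directory
    timeout: 3000     // optional delay in milliseconds, helps with rate limits
    //session_path: 'CUSTOM_SESSION_PATH',
    //proxy: 'HTTPS PROXY'
});
bot._input({
    type: 'scrape',
    action: 'followers',
    input: 'realDonaldTrump',
    number: 1000,
}).then(result => {
    // result.collector: array of scraped items
    // result.csv / result.json: file names of the saved output, when enabled
    // result.message: set when nothing was scraped (e.g. rate limit reached)
    console.log(result.collector.length);
}).catch(error => {
    console.log(error);
});
```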
License

@@ -179,0 +209,0 @@ ----
