hercai


hercai - npm Package Compare versions

Comparing version 11.1.3 to 12.0.0

cli.js

@@ -8,3 +8,3 @@ #!/usr/bin/env node

 if(!content || content == undefined || content == null)return console.error("\x1b[38;5;160mPlease specify a question!\x1B[0m ")
-try{var api = await axios.get("https://hercai.onrender.com/v3-beta/hercai?question="+encodeURI(content),{headers: {"content-type": "application/json"}})
+try{var api = await axios.get("https://hercai.onrender.com/v3/hercai?question="+encodeURI(content),{headers: {"content-type": "application/json"}})
 var data = api.data;

@@ -11,0 +11,0 @@ return console.log("\x1b[38;5;46mHerc.ai:\x1b[33m "+data.reply+"\x1B[0m")
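For context, the change above only swaps the CLI's question endpoint from /v3-beta to /v3. Below is a minimal standalone sketch of the same request flow, assuming axios is installed; the endpoint, query parameter, and reply field are taken from the diff itself.

// Sketch of the request the updated CLI makes (endpoint and response shape per the diff above).
const axios = require('axios');

async function ask(content) {
  if (!content) return console.error("Please specify a question!");
  const api = await axios.get(
    "https://hercai.onrender.com/v3/hercai?question=" + encodeURI(content),
    { headers: { "content-type": "application/json" } }
  );
  console.log("Herc.ai: " + api.data.reply);
}

ask("hi, how are you?");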

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const axios = require('axios');
const baseurl = "https://hercai.onrender.com/v3-beta/hercai";
const baseurl = "https://hercai.onrender.com/v3/hercai";

@@ -24,3 +24,6 @@

 * The Question You Want to Ask Artificial Intelligence.
-* @param {string} model "v3-beta" (GPT-4)
+* @param {string} model "v3" (GPT-4)
+* @param {string} model "v3-32k" (GPT-4-32k)
+* @param {string} model "turbo" (GPT-3.5 Turbo)
+* @param {string} model "turbo-16k" (GPT-3.5 Turbo-16k)
 * @param {string} model "gemini" (Google Gemini-Pro)

@@ -33,4 +36,4 @@ * @param {string} content The Question You Want to Ask Artificial Intelligence.

 */
-async question({model = "v3-beta",content}){
-if(!["v3-beta","gemini"].some(ind => model == ind)) model = "v3-beta";
+async question({model = "v3",content}){
+if(!["v3","gemini","v3-32k","turbo","turbo-16k"].some(ind => model == ind)) model = "v3";
 if(!content || content == undefined || content == null)throw new Error("Please specify a question!");
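A short usage sketch of the widened question() model list introduced in this hunk. The import line is not part of this diff and is assumed to follow the package README (const { Hercai } = require("hercai")).

// Usage sketch for the expanded model list (import assumed from the package README, not shown in this diff).
const { Hercai } = require("hercai");
const herc = new Hercai();

// Per the validation above, any model outside "v3", "v3-32k", "turbo", "turbo-16k", "gemini" falls back to "v3".
herc.question({ model: "turbo-16k", content: "hi, how are you?" })
  .then(response => console.log(response.reply));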

@@ -54,5 +57,6 @@ try{

 * Tell Artificial Intelligence What You Want to Draw.
-* @param {string} model "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia"
+* @param {string} model "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia", "simurg", "animefy", "raava", "shonin"
 * @param {string} prompt Tell Artificial Intelligence What You Want to Draw.
-* @example client.drawImage({model:"v1",prompt:"anime girl"})
+* @param {string} negative_prompt It includes the features that you do not want to be included in the output you want from artificial intelligence.
+* @example client.drawImage({model:"v3",prompt:"anime girl"})
 * @type {string} Tell Artificial Intelligence What You Want to Draw.

@@ -62,7 +66,8 @@ * @returns {Hercai}

 */
-async drawImage({model = "v3",prompt}){
-if(!["v1","v2","v2-beta","v3","lexica","prodia"].some(ind => model == ind)) model = "prodia";
+async drawImage({model = "v3",prompt,negative_prompt}){
+if(!["v1","v2","v2-beta","v3","lexica","prodia","simurg","animefy","raava","shonin"].some(ind => model == ind)) model = "prodia";
 if(!prompt || prompt == undefined || prompt == null)throw new Error("Please specify a prompt!");
+if(!negative_prompt || negative_prompt == undefined || negative_prompt == null) negative_prompt = "";
 try{
-var api = await axios.get(`https://hercai.onrender.com/${model}/text2image`+"?prompt="+encodeURI(prompt),{
+var api = await axios.get(`https://hercai.onrender.com/${model}/text2image`+"?prompt="+encodeURI(prompt)+"&negative_prompt="+encodeURI(negative_prompt),{
 headers: {

@@ -69,0 +74,0 @@ "content-type": "application/json",
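Similarly, a short sketch of calling drawImage() with the new negative_prompt parameter and one of the newly supported models, again assuming the README-style import; model names and the fallback behavior come from the hunk above.

// Usage sketch for the new negative_prompt parameter (import assumed from the package README).
const { Hercai } = require("hercai");
const herc = new Hercai();

herc.drawImage({
  model: "animefy",                        // one of the models added in this release
  prompt: "anime girl",
  negative_prompt: "blurry, low quality"   // omitted values default to "" per the check above
}).then(response => console.log(response.url));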

 {
 "name": "hercai",
-"version": "11.1.3",
+"version": "12.0.0",
 "description": "A powerful library for interacting with the Herc.ai API",

@@ -92,3 +92,7 @@ "main": "index.js",

"lodash",
"stable"
"stable",
"anime",
"bes",
"fivesobes",
"five"
],

@@ -95,0 +99,0 @@ "repository": {

@@ -39,3 +39,3 @@ <p align="center"> <a href="#"> <img width=500 src="https://raw.githubusercontent.com/Bes-js/herc.ai/main/hercai-logo.png"></a></p>

-> **Question API; [https://hercai.onrender.com/v3-beta/hercai?question=](https://hercai.onrender.com/v3-beta/hercai?question=)**
+> **Question API; [https://hercai.onrender.com/v3/hercai?question=](https://hercai.onrender.com/v3/hercai?question=)**

@@ -50,5 +50,5 @@ **Example Question For CommonJS;**

 /* Available Models */
-/* "v3-beta" , "gemini" */
-/* Default Model; "v3-beta" */
-herc.question({model:"v3-beta",content:"hi, how are you?"}).then(response => {
+/* "v3" , "v3-32k" , "turbo" , "turbo-16k" , "gemini" */
+/* Default Model; "v3" */
+herc.question({model:"v3",content:"hi, how are you?"}).then(response => {
 console.log(response.reply);

@@ -71,5 +71,5 @@ /* The module will reply based on the message! */

 /* Available Models */
-/* "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia" */
-/* Default Model; "v2" */
-herc.drawImage({model:"v2",prompt:"anime girl"}).then(response => {
+/* "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia", "simurg", "animefy", "raava", "shonin" */
+/* Default Model; "v3" */
+herc.drawImage({model:"v3",prompt:"anime girl",negative_prompt:""}).then(response => {
 console.log(response.url);

@@ -88,3 +88,3 @@ /* The module will reply based on the prompt! */

 /* Question Example For TypeScript */
-herc.question({model:"v3-beta",content:"hi, how are you?"})
+herc.question({model:"v3",content:"hi, how are you?"})
 .then((response:QuestionData) => {

@@ -95,3 +95,3 @@ console.log(response.reply);

 /* DrawImage Example For TypeScript */
-herc.drawImage({model:"v2",prompt:"anime girl"})
+herc.drawImage({model:"v3",prompt:"anime girl",negative_prompt:""})
 .then((response:DrawImageData) => {

@@ -98,0 +98,0 @@ console.log(response.url);

@@ -13,3 +13,6 @@ export = Hercai;

 * The Question You Want to Ask Artificial Intelligence.
-* @param {string} model "v3-beta" (GPT-4)
+* @param {string} model "v3" (GPT-4)
+* @param {string} model "v3-32k" (GPT-4-32k)
+* @param {string} model "turbo" (GPT-3.5 Turbo)
+* @param {string} model "turbo-16k" (GPT-3.5 Turbo-16k)
 * @param {string} model "gemini" (Google Gemini-Pro)

@@ -22,3 +25,3 @@ * @param {string} content The Question You Want to Ask Artificial Intelligence.

 */
-question(object:{model:"v3-beta"|"gemini",content:string}):Promise<{content:string,reply:string}>;
+question(object:{model:"v3"|"v3-32k"|"turbo"|"turbo-16k"|"gemini",content:string}):Promise<{content:string,reply:string}>;

@@ -28,5 +31,6 @@

 * Tell Artificial Intelligence What You Want to Draw.
-* @param {string} model "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia"
+* @param {string} model "v1" , "v2" , "v2-beta" , "v3" (DALL-E) , "lexica" , "prodia", "simurg", "animefy", "raava", "shonin"
 * @param {string} prompt Tell Artificial Intelligence What You Want to Draw.
-* @example client.drawImage({model:"v1",prompt:"anime girl"})
+* @param {string} negative_prompt It includes the features that you do not want to be included in the output you want from artificial intelligence.
+* @example client.drawImage({model:"v3",prompt:"anime girl"})
 * @type {string} Tell Artificial Intelligence What You Want to Draw.

@@ -36,3 +40,3 @@ * @returns {Hercai}

 */
-drawImage(object:{model:"v1"|"v2"|"v2-beta"|"v3"|"lexica"|"prodia",prompt:string}):Promise<{model:string,prompt:string,url:string}>;
+drawImage(object:{model:"v1"|"v2"|"v2-beta"|"v3"|"lexica"|"prodia"|"simurg"|"animefy"|"raava"|"shonin",prompt:string,negative_prompt:string|""}):Promise<{model:string,prompt:string,url:string}>;

@@ -39,0 +43,0 @@
