Latest Threat Research:SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains.Details
Socket
Book a DemoInstallSign in
Socket

git-bob

Package Overview
Dependencies
Maintainers
1
Versions
65
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

git-bob - npm Package Compare versions

Comparing version
0.26.0
to
0.27.0
+1
-1
PKG-INFO
Metadata-Version: 2.4
Name: git-bob
Version: 0.26.0
Version: 0.27.0
Summary: git-bob uses AI to solve Github-issues. It runs inside the Github CI, no need to install anything on your computer.

@@ -5,0 +5,0 @@ Home-page: https://github.com/haesleinhuepf/git-bob

@@ -68,2 +68,3 @@ [metadata]

e-infra_cz = git_bob._endpoints:prompt_e_infra_cz
scads = git_bob._endpoints:prompt_scads
git_bob.triggers =

@@ -70,0 +71,0 @@ review = git_bob._ai_github_utilities:review_pull_request

@@ -21,2 +21,3 @@ [console_scripts]

pixtral = git_bob._endpoints:prompt_mistral
scads = git_bob._endpoints:prompt_scads

@@ -23,0 +24,0 @@ [git_bob.triggers]

Metadata-Version: 2.4
Name: git-bob
Version: 0.26.0
Version: 0.27.0
Summary: git-bob uses AI to solve Github-issues. It runs inside the Github CI, no need to install anything on your computer.

@@ -5,0 +5,0 @@ Home-page: https://github.com/haesleinhuepf/git-bob

@@ -1,2 +0,2 @@

__version__ = "0.26.0"
__version__ = "0.27.0"

@@ -3,0 +3,0 @@ __all__ = (

@@ -114,7 +114,16 @@ """

# submit prompt
response = client.chat.completions.create(
model=model,
messages=message,
max_tokens=max_response_tokens,
)
if model.startswith("gpt-5"):
if max_response_tokens == 16384: # overwrite default becasue gpt-5 is more capable
max_response_tokens=128000
response = client.chat.completions.create(
model=model,
messages=message,
max_completion_tokens=max_response_tokens,
)
else:
response = client.chat.completions.create(
model=model,
messages=message,
max_tokens=max_response_tokens,
)

@@ -137,2 +146,14 @@ result = append_result(result, response.choices[0].message.content)

def prompt_scads(message: str, model="openai/gpt-oss-120b", image=None, max_accumulated_responses=10, max_response_tokens=128000, base_url=None, api_key=None):
    """Send a prompt to the ScaDS.AI LLM endpoint via the OpenAI-compatible API.

    Parameters
    ----------
    message : str
        The prompt text to submit.
    model : str, optional
        Model identifier; an optional "scads:" prefix is stripped before use.
    image : optional
        Image payload forwarded unchanged to ``prompt_openai``.
    max_accumulated_responses : int, optional
        Forwarded to ``prompt_openai``; caps accumulated continuation responses.
    max_response_tokens : int, optional
        Forwarded to ``prompt_openai``; response token budget.
    base_url : str, optional
        API base URL; defaults to the ScaDS.AI endpoint when None.
    api_key : str, optional
        API key; read from the ``SCADS_API_KEY`` environment variable when None.

    Returns
    -------
    The result of ``prompt_openai`` for the given message.
    """
    import os
    if base_url is None:
        # Fixed malformed scheme: was "hhttps://..." which no HTTP client accepts.
        base_url = "https://llm.scads.ai/v1"
    if api_key is None:
        api_key = os.environ.get("SCADS_API_KEY")
    # Strip the routing prefix so the raw model name is sent to the endpoint.
    model = model.replace("scads:", "")
    # NOTE(review): debug-looking output — prints the model name with its first
    # character replaced by "_"; possibly a leftover or a scrape artifact. Kept
    # byte-identical to preserve behavior; confirm intent against the repository.
    print("model", "_" + model[1:])
    return prompt_openai(message, model=model, image=image, max_accumulated_responses=max_accumulated_responses, max_response_tokens=max_response_tokens, base_url=base_url, api_key=api_key)
def prompt_kisski(message: str, model=None, image=None, max_accumulated_responses=10, max_response_tokens=16384, base_url=None, api_key=None):

@@ -139,0 +160,0 @@ import os