You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo | Sign in
Socket

neptune

Package Overview
Dependencies
Maintainers
2
Versions
73
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

neptune - pypi Package Compare versions

Comparing version
2.0.0a6
to
2.0.0a7
+1
-0
CHANGELOG.md

@@ -34,2 +34,3 @@ ## [UNRELEASED] neptune 2.0.0

- Added support for `NQL` `MATCHES` operator ([#1863](https://github.com/neptune-ai/neptune-client/pull/1863))
- Pagination respecting `limit` parameter and page size ([#1866](https://github.com/neptune-ai/neptune-client/pull/1866))

@@ -36,0 +37,0 @@ ### Fixes

+1
-1
Metadata-Version: 2.1
Name: neptune
Version: 2.0.0a6
Version: 2.0.0a7
Summary: Neptune Client

@@ -5,0 +5,0 @@ Home-page: https://neptune.ai/

@@ -92,3 +92,3 @@ [build-system]

readme = "README.md"
version = "2.0.0-alpha.6"
version = "2.0.0-alpha.7"
classifiers = [

@@ -95,0 +95,0 @@ "Development Status :: 5 - Production/Stable",

@@ -19,2 +19,3 @@ #

import abc
import itertools
from dataclasses import dataclass

@@ -24,4 +25,4 @@ from typing import (

Callable,
Iterable,
Iterator,
List,
Optional,

@@ -51,3 +52,5 @@ TypeVar,

getter: Paginatable,
extract_entries: Callable[[T], Iterable[Entry]],
extract_entries: Callable[[T], List[Entry]],
page_size: int = 50,
limit: Optional[int] = None,
**kwargs: Any,

@@ -58,7 +61,22 @@ ) -> Iterator[Entry]:

"""
data = getter(**kwargs, next_page=None)
yield from extract_entries(data)
counter = 0
data = getter(**kwargs, next_page=NextPage(limit=page_size, next_page_token=None))
results = extract_entries(data)
if limit is not None:
counter = len(results[:limit])
yield from itertools.islice(results, limit)
while data.next_page is not None and data.next_page.next_page_token is not None:
data = getter(**kwargs, next_page=data.next_page)
yield from extract_entries(data)
to_fetch = page_size
if limit is not None:
if counter >= limit:
break
to_fetch = min(page_size, limit - counter)
data = getter(**kwargs, next_page=NextPage(limit=to_fetch, next_page_token=data.next_page.next_page_token))
results = extract_entries(data)
if limit is not None:
counter += len(results[:to_fetch])
yield from itertools.islice(results, to_fetch)