You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

s3path

Package Overview
Dependencies
Maintainers
1
Versions
43
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

s3path - pypi Package Compare versions

Comparing version
0.6.1
to
0.6.2
+1
-1
PKG-INFO
Metadata-Version: 2.4
Name: s3path
Version: 0.6.1
Version: 0.6.2
Home-page: https://github.com/liormizr/s3path

@@ -5,0 +5,0 @@ Author: Lior Mizrahi

Metadata-Version: 2.4
Name: s3path
Version: 0.6.1
Version: 0.6.2
Home-page: https://github.com/liormizr/s3path

@@ -5,0 +5,0 @@ Author: Lior Mizrahi

@@ -8,3 +8,3 @@ """

__version__ = '0.6.1'
__version__ = '0.6.2'
__all__ = (

@@ -11,0 +11,0 @@ 'Path',

@@ -7,2 +7,3 @@ import sys

from itertools import chain
from collections import deque
from functools import lru_cache

@@ -158,2 +159,5 @@ from contextlib import suppress

return True
with suppress(KeyError):
return path._cache['is_dir']
resource, config = configuration_map.get_configuration(path)

@@ -165,3 +169,5 @@ bucket = resource.Bucket(path.bucket)

config=config)
return any(query)
is_dir = any(query)
path._cache['is_dir'] = is_dir
return is_dir

@@ -269,8 +275,2 @@

def listdir(path):
    """Yield the bare names of the entries contained in *path*."""
    with scandir(path) as entries:
        yield from (entry.name for entry in entries)
def open(path, *, mode='r', buffering=-1, encoding=None, errors=None, newline=None):

@@ -343,6 +343,66 @@ resource, config = configuration_map.get_configuration(path)

)
except ClientError:
raise OSError(f'/{bucket_name}/{key_name}')
except Exception as error:
raise OSError(f'/{bucket_name}/{key_name}') from error
def walk(path, *, topdown=True, onerror=None, followlinks=False):
    """Iteratively walk an S3 "directory" tree, mirroring os.walk.

    Yields (top, dirs, nondirs) triples. ``followlinks`` is accepted for
    signature compatibility with os.walk but is never read here — S3 has
    no symlinks. ``onerror`` (a callable or None) receives any exception
    raised while listing a directory; when it is None the failing
    directory is silently skipped, matching os.walk semantics.
    """
    try:
        if not exists(path):
            raise FileNotFoundError(f'No such file or directory: {path}')
    except FileNotFoundError as error:
        # Missing root: report through onerror (if given) and stop without yielding.
        if onerror is not None:
            onerror(error)
        return
    # The stack holds two kinds of items: plain paths still to be scanned,
    # and pre-built (top, dirs, nondirs) tuples queued for bottom-up yield.
    stack = deque([path])
    while stack:
        top = stack.pop()
        if isinstance(top, tuple):
            # Deferred bottom-up result — emit it after its children.
            yield top
            continue
        dirs = []
        nondirs = []
        walk_dirs = []
        cont = False
        with scandir(top) as scandir_iter:
            scandir_iter = iter(scandir_iter)
            while True:
                try:
                    entry = next(scandir_iter)
                    is_dir = entry.is_dir()
                    if is_dir:
                        dirs.append(entry.name)
                    else:
                        nondirs.append(entry.name)
                    if not topdown and is_dir:
                        # Bottom-up needs full child paths queued before the parent tuple.
                        walk_dirs.append(top / entry.name)
                except StopIteration:
                    break
                except Exception as error:
                    # Listing failed mid-scan: hand the error to onerror (if any)
                    # and skip this directory entirely.
                    if onerror is not None:
                        onerror(error)
                    cont = True
                    break
        if cont:
            continue
        if topdown:
            # Yield before sub-directory traversal if going top down
            yield top, dirs, nondirs
            # Traverse into sub-directories
            # (reversed so the stack pops them in original order; the caller
            # may have pruned `dirs` in place before we resume — TODO confirm
            # callers rely on that, as with os.walk)
            for dirname in reversed(dirs):
                new_path = top / dirname
                stack.append(new_path)
        else:
            # Yield after sub-directory traversal if going bottom up
            stack.append((top, dirs, nondirs))
            # Traverse into sub-directories
            for new_path in reversed(walk_dirs):
                stack.append(new_path)
def _is_versioned_path(path):

@@ -349,0 +409,0 @@ return hasattr(path, 'version_id') and bool(path.version_id)

@@ -280,3 +280,13 @@ from __future__ import annotations

class S3Path(_PathNotSupportedMixin, PureS3Path, Path):
class _PathCacheMixin:
"""
This is a mixin class to cache the results and path state.
Note: this is experimental and will be more robust in the future.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._cache = {}
class S3Path(_PathNotSupportedMixin, _PathCacheMixin, PureS3Path, Path):
def stat(self, *, follow_symlinks: bool = True) -> accessor.StatResult:

@@ -435,4 +445,7 @@ """

self._absolute_path_validation()
for name in accessor.listdir(self):
yield self / name
with accessor.scandir(self) as scandir_iter:
for entry in scandir_iter:
path = self / entry.name
path._cache['is_dir'] = entry.is_dir()
yield path

@@ -450,2 +463,4 @@ def open(

self._absolute_path_validation()
if 'r' in mode and not self.exists():
raise FileNotFoundError(f'No such file or directory: {self}')
return accessor.open(

@@ -574,9 +589,10 @@ self,

def _scandir(self):
    """
    Override _scandir so _Selector will rely on an S3 compliant implementation

    pathlib's glob machinery (_Selector) calls this hook; delegating to
    accessor.scandir keeps directory enumeration on the S3 backend.
    """
    return accessor.scandir(self)
def walk(self, top_down: bool = True, on_error=None, follow_symlinks: bool = False):
    """Walk this S3 "directory" tree, mirroring pathlib.Path.walk (3.12+).

    Args:
        top_down: yield each directory before (True) or after (False) its children.
        on_error: optional callable invoked with the exception raised while
            listing a directory. (Previously mis-annotated as ``bool`` — it is
            a callback, not a flag; annotation removed to avoid misleading
            type checkers.)
        follow_symlinks: must stay False — S3 has no symlinks.

    Yields:
        (path, dirnames, filenames) triples from accessor.walk.

    Raises:
        NotImplementedError: if follow_symlinks is truthy.
    """
    if follow_symlinks:
        raise NotImplementedError(f'Setting follow_symlinks to {follow_symlinks} is unsupported on S3 service.')
    sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks)
    yield from accessor.walk(self, topdown=top_down, onerror=on_error)
class PureVersionedS3Path(PureS3Path):

@@ -583,0 +599,0 @@ """

@@ -8,3 +8,3 @@ #!/usr/bin/env python

name='s3path',
version='0.6.1',
version='0.6.2',
url='https://github.com/liormizr/s3path',

@@ -11,0 +11,0 @@ author='Lior Mizrahi',

import shutil
import sys
from datetime import timedelta
from pathlib import Path
from pathlib import Path, PosixPath
from io import UnsupportedOperation

@@ -213,4 +213,2 @@ from tempfile import NamedTemporaryFile

new_file.touch()
print()
print(f'Globing: {first_dir=}, pattern: "*"')
assert list(first_dir.glob("*")) == [S3Path('/my-bucket/first_dir/some_dir/')]

@@ -221,4 +219,2 @@

new_file.touch()
print()
print(f'Globing: {second_dir=}, pattern: "*"')
assert list(second_dir.glob("*")) == [S3Path('/my-bucket/first_dir/second_dir/some_dir/')]

@@ -229,4 +225,2 @@

new_file.touch()
print()
print(f'Globing: {third_dir=}, pattern: "*"')
assert list(third_dir.glob("*")) == [S3Path('/my-bucket/first_dir/second_dir/third_dir/some_dir/')]

@@ -505,2 +499,18 @@

@pytest.mark.skipif(sys.version_info < (3, 12), reason="requires python 3.12 or higher")
def test_issue_193(s3_mock):
    """Regression: iterdir() must pre-populate the is_dir cache on children."""
    resource = boto3.resource('s3')
    resource.create_bucket(Bucket='test-bucket')
    resource.ObjectSummary('test-bucket', 'docs/conf.py').put(Body=b'test data')

    docs_dir = S3Path('/test-bucket/docs')
    assert sorted(docs_dir.iterdir()) == sorted([
        S3Path('/test-bucket/docs/conf.py'),
    ])

    # The child yielded by iterdir() should already know it is a file.
    child = next(iter(docs_dir.iterdir()))
    assert 'is_dir' in child._cache
    assert not child._cache['is_dir']
def test_open_for_reading(s3_mock):

@@ -909,1 +919,101 @@ s3 = boto3.resource('s3')

assert target_path.read_bytes() == data
@pytest.mark.skipif(sys.version_info < (3, 12), reason="requires python 3.12 or higher")
def test_walk(s3_mock):
    """S3Path.walk must visit the same directories/files as a local os-style walk.

    The fixture below is a snapshot of a real project tree; each file is
    uploaded as an S3 object, then both listings are compared per directory.
    """
    s3 = boto3.resource('s3')
    s3.create_bucket(Bucket='test-bucket')
    # (directory, sub-directory names, file names) per directory of the fixture tree.
    walk_test_results = [
        (PosixPath('.'),
         ['.pytest_cache', 'tests', 'docs', 's3path', '.github', '.git', 's3path.egg-info', '.idea'],
         ['LICENSE', 'Makefile', 'MANIFEST.in', 'Pipfile', 'setup.py', '.gitignore', 'setup.cfg', 'README.rst',
          'Pipfile.lock']),
        (PosixPath('.pytest_cache'), ['v'], ['CACHEDIR.TAG', 'README.md', '.gitignore']),
        (PosixPath('.pytest_cache/v'), ['cache'], []),
        (PosixPath('.pytest_cache/v/cache'), [], ['nodeids', 'lastfailed', 'stepwise']),
        (PosixPath('tests'), [],
         ['test_not_supported.py', 'conftest.py', 'test_path_operations.py', '__init__.py',
          'test_s3path_configuration.py', 'test_pure_path_operations.py']),
        (PosixPath('docs'), [],
         ['advance.rst', 's3path_graph.jpg', 's3path_graph.svg', 'comparison.rst', 'interface.rst']),
        (PosixPath('s3path'), [],
         ['accessor.py', 'old_versions.py', '__init__.py', 'py.typed', 'current_version.py']),
        (PosixPath('.github'), ['workflows'], []),
        (PosixPath('.github/workflows'), [], ['deploying.yml', 'testing.yml']),
        (PosixPath('.git'), ['objects', 'info', 'logs', 'hooks', 'refs'],
         ['config', 'HEAD', 'description', 'index', 'packed-refs']),
        (PosixPath('.git/objects'), ['pack'], []),
        (PosixPath('.git/objects/pack'), [],
         ['pack-746373b9d83ac407488288f60747a6de8ac71439.idx',
          'pack-746373b9d83ac407488288f60747a6de8ac71439.pack']),
        (PosixPath('.git/info'), [], ['exclude']),
        (PosixPath('.git/logs'), ['refs'], ['HEAD']),
        (PosixPath('.git/logs/refs'), ['heads', 'remotes'], []),
        (PosixPath('.git/logs/refs/heads'), [], ['master']),
        (PosixPath('.git/logs/refs/remotes'), ['origin'], []),
        (PosixPath('.git/logs/refs/remotes/origin'), [], ['HEAD']),
        (PosixPath('.git/hooks'), [],
         ['commit-msg.sample', 'pre-rebase.sample', 'pre-commit.sample', 'applypatch-msg.sample',
          'fsmonitor-watchman.sample', 'pre-receive.sample', 'prepare-commit-msg.sample', 'post-update.sample',
          'pre-merge-commit.sample', 'pre-applypatch.sample', 'pre-push.sample', 'update.sample',
          'push-to-checkout.sample']),
        (PosixPath('.git/refs'), ['heads', 'remotes'], []),
        (PosixPath('.git/refs/heads'), [], ['master']),
        (PosixPath('.git/refs/remotes'), ['origin'], []),
        (PosixPath('.git/refs/remotes/origin'), [], ['HEAD']),
        (PosixPath('s3path.egg-info'), [],
         ['PKG-INFO', 'SOURCES.txt', 'requires.txt', 'top_level.txt', 'dependency_links.txt']),
        (PosixPath('.idea'), ['inspectionProfiles'],
         ['s3path.iml', 'vcs.xml', '.gitignore', 'workspace.xml', 'modules.xml', 'misc.xml']),
        (PosixPath('.idea/inspectionProfiles'), [], ['profiles_settings.xml']),
    ]
    # Upload every file of the fixture; directories exist implicitly in S3.
    for path, directories, files in walk_test_results:
        for file in files:
            key = str(path / file)
            object_summary = s3.ObjectSummary('test-bucket', key)
            object_summary.put(Body=b'test data')
    # Collect both walks keyed by directory, then compare as sets so that
    # listing order differences do not matter.
    compare = {}
    for (local_path, local_directories, local_files), (s3_path, s3_directories, s3_files) in zip(walk_test_results, S3Path('/test-bucket').walk()):
        compare.setdefault(s3_path.key or '.', {})['s3'] = {'files': set(s3_files), 'directories': set(s3_directories)}
        compare.setdefault(str(local_path), {})['local'] = {'files': set(local_files), 'directories': set(local_directories)}
    for root, location in compare.items():
        # Every directory must appear in both walks with identical contents.
        assert 's3' in location and 'local' in location
        assert location['s3']['files'] == location['local']['files']
        assert location['s3']['directories'] == location['local']['directories']
@pytest.mark.skipif(sys.version_info < (3, 12), reason="requires python 3.12 or higher")
def test_walk_order(s3_mock):
    """walk() must honor top-down vs bottom-up order and surface missing roots."""
    resource = boto3.resource('s3')
    resource.create_bucket(Bucket='test-bucket')
    expected_results = [
        (PosixPath('.'), ['.pytest_cache'], ['LICENSE', 'Makefile', 'setup.cfg', 'README.rst']),
        (PosixPath('.pytest_cache'), ['v'], ['CACHEDIR.TAG', 'README.md', '.gitignore']),
        (PosixPath('.pytest_cache/v'), ['cache'], []),
        (PosixPath('.pytest_cache/v/cache'), [], ['nodeids', 'lastfailed', 'stepwise']),
    ]
    # Materialize the fixture tree as S3 objects.
    for directory, _, file_names in expected_results:
        for file_name in file_names:
            resource.ObjectSummary('test-bucket', str(directory / file_name)).put(Body=b'test data')

    # Top-down traversal follows the fixture order.
    for (_, expected_dirs, expected_files), (_, found_dirs, found_files) in zip(expected_results, S3Path('/test-bucket').walk()):
        assert set(expected_dirs) == set(found_dirs)
        assert set(expected_files) == set(found_files)

    # Bottom-up traversal follows the reversed fixture order.
    for (_, expected_dirs, expected_files), (_, found_dirs, found_files) in zip(reversed(expected_results), S3Path('/test-bucket').walk(top_down=False)):
        assert set(expected_dirs) == set(found_dirs)
        assert set(expected_files) == set(found_files)

    # A missing root yields nothing by default...
    assert list(S3Path('/test-bucket/fake/').walk()) == []

    # ...but an on_error callback can observe and re-raise the failure.
    def on_error(exception):
        assert isinstance(exception, FileNotFoundError)
        print(exception, '0'*30)
        raise exception

    with pytest.raises(FileNotFoundError):
        for _ in S3Path('/test-bucket/fake/').walk(on_error=on_error):
            pass

@@ -120,3 +120,7 @@

def test_open_method_with_custom_endpoint_url():
def test_open_method_with_custom_endpoint_url(s3_mock, reset_configuration_cache, monkeypatch):
s3 = boto3.resource('s3')
s3.create_bucket(Bucket='my-bucket')
monkeypatch.setattr(S3Path, 'exists', lambda self: True)
local_path = PureS3Path('/local/')

@@ -123,0 +127,0 @@ register_configuration_parameter(