Skip to content

Commit 4ce12e5

Browse files
Tony96301 authored and rahul003 committed
Supporting s3handler with multiprocessing (aws#48)
* Supporting s3handler with multiprocessing
* Specific exception
1 parent 4cb65bb commit 4ce12e5

File tree

1 file changed

+13
-2
lines changed

1 file changed

+13
-2
lines changed

tornasole_core/access_layer/s3handler.py

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,10 +3,18 @@
33
from tornasole_core.utils import is_s3, get_logger
44
import logging
55
import time
6-
import nest_asyncio
7-
nest_asyncio.apply()
86

97

8+
def check_notebook():
    """Apply ``nest_asyncio`` when running inside an IPython/Jupyter session.

    Jupyter already runs an asyncio event loop, so nested
    ``loop.run_until_complete`` calls (as made by S3Handler) fail unless
    ``nest_asyncio.apply()`` has patched the loop. Outside IPython this
    function is a no-op.
    """
    try:
        # get_ipython is injected into builtins only inside IPython/Jupyter;
        # a NameError here means we are in a plain interpreter.
        get_ipython()
        import nest_asyncio
        nest_asyncio.apply()
    except NameError:
        # Not running under IPython -- nothing to patch.
        pass
    except ImportError:
        # Under IPython but nest_asyncio is not installed. This is a
        # best-effort convenience patch invoked from S3Handler.__init__;
        # warn instead of aborting construction outright.
        logging.getLogger(__name__).warning(
            "Running inside IPython but nest_asyncio is not installed; "
            "asyncio calls may fail if an event loop is already running.")
17+
1018
# Must be created for ANY file read request, whether from S3 or Local
1119
# If you wish to download entire file, leave length as None and start as 0.
1220
# If length is None, start must be 0.
@@ -38,7 +46,10 @@ class S3Handler:
3846
# For debug flag, first set PYTHONASYNCIODEBUG=1 in terminal.
3947
# This provides terminal output revealing details about the AsyncIO calls and timings that may be useful.
4048
# num_retries: the number of times to retry a download or connection before logging an exception.
49+
4150
def __init__(self, num_retries=5, debug=False):
    """Create the shared event loop and the asynchronous S3 client.

    :param num_retries: how many times a download or connection is
        retried before an exception is logged.
    :param debug: kept for interface compatibility; for AsyncIO call
        timing output, set PYTHONASYNCIODEBUG=1 in the environment
        (this constructor does not read the flag itself).
    """
    # Jupyter already runs an event loop; make sure nest_asyncio has
    # been applied before grabbing the loop below.
    check_notebook()
    self.num_retries = num_retries
    self.loop = asyncio.get_event_loop()
    self.client = aioboto3.client('s3', loop=self.loop)

0 commit comments

Comments
 (0)