 from gdrivefs.gdtool.drive import get_gdrive
 from gdrivefs.general.buffer_segments import BufferSegments
 
-_logger = logging.getLogger(__name__)
+_LOGGER = logging.getLogger(__name__)
 
 # TODO(dustin): LCM runs in a greenlet pool. When we open a file that needs the
 #               existing data for a file (read, append), a switch is done to an
@@ -36,9 +36,10 @@ class _OpenedManager(object):
     def __init__(self):
         self.__opened = {}
         self.__opened_byfile = {}
+        self.__counter = 0
 
         self.__temp_path = tempfile.mkdtemp()
-        _logger.debug("Opened-file working directory: [%s]", self.__temp_path)
+        _LOGGER.debug("Opened-file working directory: [%s]", self.__temp_path)
 
     def __del__(self):
         shutil.rmtree(self.__temp_path)
@@ -55,6 +56,8 @@ def get_new_handle(self):
         cls = self.__class__
         max_handles = self.__get_max_handles()
 
+        self.__counter += 1
+
         with cls.__opened_lock:
             if len(self.__opened) >= (max_handles + 1):
                 raise fuse.FuseOSError(EMFILE)
@@ -67,13 +70,13 @@ def get_new_handle(self):
                     cls.__fh_counter = 1
 
                 if cls.__fh_counter not in self.__opened:
-                    _logger.debug("Assigning file-handle (%d).",
+                    _LOGGER.debug("Assigning file-handle (%d).",
                                   cls.__fh_counter)
 
                     return cls.__fh_counter
 
         message = "Could not allocate new file handle. Safety breach."
-        _logger.error(message)
+        _LOGGER.error(message)
         raise Exception(message)
 
     def add(self, opened_file, fh=None):
@@ -92,7 +95,7 @@ def add(self, opened_file, fh=None):
                 message = ("Opened-file with file-handle (%d) has already been"
                            " registered." % (opened_file.fh))
 
-                _logger.error(message)
+                _LOGGER.error(message)
                 raise Exception(message)
 
             self.__opened[fh] = opened_file
@@ -111,7 +114,7 @@ def remove_by_fh(self, fh):
         cls = self.__class__
 
         with cls.__opened_lock:
-            _logger.debug("Closing opened-file with handle (%d).", fh)
+            _LOGGER.debug("Closing opened-file with handle (%d).", fh)
 
             file_path = self.__opened[fh].file_path
             del self.__opened[fh]
@@ -131,7 +134,7 @@ def remove_by_filepath(self, file_path):
 
         cls = self.__class__
 
-        _logger.debug("Removing all open handles for file-path [%s].",
+        _LOGGER.debug("Removing all open handles for file-path [%s].",
                       file_path)
 
         count = 0
@@ -144,7 +147,7 @@ def remove_by_filepath(self, file_path):
             except KeyError:
                 pass
 
-        _logger.debug("(%d) file-handles removed for file-path [%s].",
+        _LOGGER.debug("(%d) file-handles removed for file-path [%s].",
                       count, file_path)
 
     def get_by_fh(self, fh):
@@ -157,11 +160,15 @@ def get_by_fh(self, fh):
                 message = ("Opened-file with file-handle (%d) is not "
                            "registered (get_by_fh)." % (fh))
 
-                _logger.error(message)
+                _LOGGER.error(message)
                 raise Exception(message)
 
             return self.__opened[fh]
 
+    @property
+    def opened_count(self):
+        return self.__counter
+
     @property
     def temp_path(self):
         return self.__temp_path
@@ -183,7 +190,7 @@ def __init__(self, entry_id, path, filename, is_hidden, mime_type):
 
             _OPENED_ENTRIES.add(entry_id)
 
-        _logger.info("Opened-file object created for entry-ID [%s] and path "
+        _LOGGER.info("Opened-file object created for entry-ID [%s] and path "
                      "(%s).", entry_id, path)
 
         self.__entry_id = entry_id
@@ -197,9 +204,12 @@ def __init__(self, entry_id, path, filename, is_hidden, mime_type):
         self.__is_loaded = False
         self.__is_dirty = False
 
-        temp_filename = self.__entry_id.encode('ASCII')
+        # Use the monotonically incremented `opened_count` to produce a unique
+        # temporary filepath.
+
         om = get_om()
-        self.__temp_filepath = os.path.join(om.temp_path, temp_filename)
+        self.__temp_filepath = \
+            os.path.join(om.temp_path, str(om.opened_count))
 
         self.__fh = None
 
@@ -219,7 +229,11 @@ def __del__(self):
         """
 
         if self.__fh is not None:
+            _LOGGER.debug("Removing temporary file [%s] ([%s]).",
+                          self.__temp_filepath, self.file_path)
+
             self.__fh.close()
+            os.unlink(self.__temp_filepath)
 
         with _OPENED_ENTRIES_LOCK:
             _OPENED_ENTRIES.remove(self.__entry_id)
@@ -251,7 +265,7 @@ def __load_base_from_remote(self):
 
         # If it's loaded and not-changed, don't do anything.
         if self.__is_loaded is True and self.__is_dirty is False:
-            _logger.debug("Not syncing-down non-dirty file.")
+            _LOGGER.debug("Not syncing-down non-dirty file.")
             return
 
         if self.__fh is not None:
@@ -260,13 +274,13 @@ def __load_base_from_remote(self):
 
         entry = self.__cache.get(self.__entry_id)
 
-        _logger.debug("Ensuring local availability of [%s]: [%s]",
+        _LOGGER.debug("Ensuring local availability of [%s]: [%s]",
                       entry, self.__temp_filepath)
 
         # Get the current version of the write-cache file, or note that we
         # don't have it.
 
-        _logger.info("Attempting local cache update of file [%s] for entry "
+        _LOGGER.info("Attempting local cache update of file [%s] for entry "
                      "[%s] and mime-type [%s].",
                      self.__temp_filepath, entry, self.mime_type)
 
@@ -279,7 +293,7 @@ def __load_base_from_remote(self):
             self.__fh = open(self.__temp_filepath, 'w+')
             self.__fh.write(stub_data)
         else:
-            _logger.debug("Executing the download: [%s] => [%s]",
+            _LOGGER.debug("Executing the download: [%s] => [%s]",
                           entry.id, self.__temp_filepath)
 
             try:
@@ -298,22 +312,22 @@ def __load_base_from_remote(self):
 
                 (length, cache_fault) = result
             except ExportFormatError:
-                _logger.exception("There was an export-format error.")
+                _LOGGER.exception("There was an export-format error.")
                 raise fuse.FuseOSError(ENOENT)
 
             self.__fh = open(self.__temp_filepath, 'r+')
 
         self.__is_dirty = False
         self.__is_loaded = True
 
-        _logger.debug("Established base file-data for [%s]: [%s]",
+        _LOGGER.debug("Established base file-data for [%s]: [%s]",
                       entry, self.__temp_filepath)
 
     @dec_hint(['offset', 'data'], ['data'], 'OF')
     def add_update(self, offset, data):
         """Queue an update to this file."""
 
-        _logger.debug("Applying update for offset (%d) and length (%d).",
+        _LOGGER.debug("Applying update for offset (%d) and length (%d).",
                       offset, len(data))
 
         self.__is_dirty = True
@@ -325,12 +339,12 @@ def add_update(self, offset, data):
     def flush(self):
         """The OS wants to effect any changes made to the file."""
 
-        _logger.debug("Flushing opened-file.")
+        _LOGGER.debug("Flushing opened-file.")
 
         entry = self.__cache.get(self.__entry_id)
 
         if self.__is_dirty is False:
-            _logger.debug("Flush will be skipped for [%s] because there "
+            _LOGGER.debug("Flush will be skipped for [%s] because there "
                           "are no changes: [%s] IS_LOADED=[%s] "
                           "IS_DIRTY=[%d]",
                           entry.id, self.file_path, self.__is_loaded,
@@ -339,7 +353,7 @@ def flush(self):
         else:
             st = os.stat(self.__temp_filepath)
 
-            _logger.debug("Pushing (%d) bytes for entry with ID from [%s] to "
+            _LOGGER.debug("Pushing (%d) bytes for entry with ID from [%s] to "
                           "GD for file-path [%s].",
                           st.st_size, entry.id, self.__temp_filepath)
 
@@ -360,17 +374,17 @@ def flush(self):
 
             # Immediately update our current cached entry.
 
-            _logger.debug("Update successful. Updating local cache.")
+            _LOGGER.debug("Update successful. Updating local cache.")
 
             path_relations = PathRelations.get_instance()
             path_relations.register_entry(entry)
 
-            _logger.info("Update complete on entry with ID [%s].", entry.id)
+            _LOGGER.info("Update complete on entry with ID [%s].", entry.id)
 
     @dec_hint(['offset', 'length'], prefix='OF')
     def read(self, offset, length):
 
-        _logger.debug("Reading (%d) bytes at offset (%d).", length, offset)
+        _LOGGER.debug("Reading (%d) bytes at offset (%d).", length, offset)
 
         # We don't care if the cache file is dirty (not on this system, at
         # least).
@@ -382,11 +396,11 @@ def read(self, offset, length):
 
         len_ = len(data)
 
-        _logger.debug("(%d) bytes retrieved from slice (%d):(%d)/(%d).",
+        _LOGGER.debug("(%d) bytes retrieved from slice (%d):(%d)/(%d).",
                       len_, offset, length, st.st_size)
 
         if len_ != length:
-            _logger.warning("Read request is only returning (%d) bytes when "
+            _LOGGER.warning("Read request is only returning (%d) bytes when "
                             "(%d) bytes were requested.", len_, length)
 
         return data
@@ -411,14 +425,14 @@ def create_for_existing_filepath(filepath):
     the information.
     """
 
-    _logger.debug("Creating OpenedFile for [%s].", filepath)
+    _LOGGER.debug("Creating OpenedFile for [%s].", filepath)
 
     # Process/distill the requested file-path.
 
     try:
         result = split_path(filepath, path_resolver)
     except GdNotFoundError:
-        _logger.exception("Could not process [%s] (create_for_requested).",
+        _LOGGER.exception("Could not process [%s] (create_for_requested).",
                           filepath)
 
         raise fuse.FuseOSError(ENOENT)
@@ -434,13 +448,13 @@ def create_for_existing_filepath(filepath):
         entry_clause = path_relations.get_clause_from_path(
                             distilled_filepath)
     except:
-        _logger.exception("Could not try to get clause from path [%s] "
+        _LOGGER.exception("Could not try to get clause from path [%s] "
                           "(OpenedFile).", distilled_filepath)
 
         raise fuse.FuseOSError(EIO)
 
     if not entry_clause:
-        _logger.debug("Path [%s] does not exist for stat().", path)
+        _LOGGER.debug("Path [%s] does not exist for stat().", path)
         raise fuse.FuseOSError(ENOENT)
 
     entry = entry_clause[CLAUSE_ENTRY]
@@ -453,18 +467,18 @@ def create_for_existing_filepath(filepath):
     try:
         final_mimetype = entry.normalize_download_mimetype(mime_type)
     except ExportFormatError:
-        _logger.exception("There was an export-format error "
+        _LOGGER.exception("There was an export-format error "
                           "(create_for_requested_filesystem).")
 
         raise fuse.FuseOSError(ENOENT)
     except:
-        _logger.exception("Could not normalize mime-type [%s] for entry"
+        _LOGGER.exception("Could not normalize mime-type [%s] for entry"
                           "[%s].", mime_type, entry)
 
         raise fuse.FuseOSError(EIO)
 
     if final_mimetype != mime_type:
-        _logger.info("Entry being opened will be opened as [%s] rather "
+        _LOGGER.info("Entry being opened will be opened as [%s] rather "
                      "than [%s].", final_mimetype, mime_type)
 
     # Build the object.
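
For context, a minimal standalone sketch of the counter-based temporary-file naming that this diff switches to (the `_TempNameAllocator` class, its `allocate()` method, and the demo below are illustrative assumptions, not gdrivefs API): every open draws a fresh integer from a shared counter, so two opens of the same entry no longer collide on a single entry-ID-derived temp file.

```python
import os
import shutil
import tempfile
import threading


class _TempNameAllocator(object):
    """Illustrative only: hand out unique temp-file paths from a counter,
    mirroring the `opened_count`-based naming introduced above."""

    def __init__(self):
        self.__counter = 0
        self.__lock = threading.Lock()
        self.__temp_path = tempfile.mkdtemp()

    def __del__(self):
        # Mirrors _OpenedManager.__del__(): drop the whole working directory.
        shutil.rmtree(self.__temp_path)

    def allocate(self):
        # Each call yields a distinct filepath, even when the same entry-ID
        # is opened twice, which an entry-ID-derived name could not guarantee.
        with self.__lock:
            self.__counter += 1
            return os.path.join(self.__temp_path, str(self.__counter))


if __name__ == '__main__':
    allocator = _TempNameAllocator()
    print(allocator.allocate())  # e.g. /tmp/tmpXXXXXX/1
    print(allocator.allocate())  # e.g. /tmp/tmpXXXXXX/2
```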