Skip to content

Commit cd7ea45

Browse files
committed
We now delete temporary files after release.
- Eliminated potential temporary-file naming collisions by using the file-counter as the filename.
- Fixed progress reporting in the log (it was printing the bound method's __str__ rather than the float returned by calling it).
- Renamed _logger to _LOGGER in the OF module.
1 parent 9ff5774 commit cd7ea45

File tree

2 files changed

+54
-39
lines changed

2 files changed

+54
-39
lines changed

gdrivefs/gdfs/opened_file.py

Lines changed: 48 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
from gdrivefs.gdtool.drive import get_gdrive
2020
from gdrivefs.general.buffer_segments import BufferSegments
2121

22-
_logger = logging.getLogger(__name__)
22+
_LOGGER = logging.getLogger(__name__)
2323

2424
# TODO(dustin): LCM runs in a greenlet pool. When we open a file that needs the
2525
# existing data for a file (read, append), a switch is done to an
@@ -36,9 +36,10 @@ class _OpenedManager(object):
3636
def __init__(self):
3737
self.__opened = {}
3838
self.__opened_byfile = {}
39+
self.__counter = 0
3940

4041
self.__temp_path = tempfile.mkdtemp()
41-
_logger.debug("Opened-file working directory: [%s]", self.__temp_path)
42+
_LOGGER.debug("Opened-file working directory: [%s]", self.__temp_path)
4243

4344
def __del__(self):
4445
shutil.rmtree(self.__temp_path)
@@ -55,6 +56,8 @@ def get_new_handle(self):
5556
cls = self.__class__
5657
max_handles = self.__get_max_handles()
5758

59+
self.__counter += 1
60+
5861
with cls.__opened_lock:
5962
if len(self.__opened) >= (max_handles + 1):
6063
raise fuse.FuseOSError(EMFILE)
@@ -67,13 +70,13 @@ def get_new_handle(self):
6770
cls.__fh_counter = 1
6871

6972
if cls.__fh_counter not in self.__opened:
70-
_logger.debug("Assigning file-handle (%d).",
73+
_LOGGER.debug("Assigning file-handle (%d).",
7174
cls.__fh_counter)
7275

7376
return cls.__fh_counter
7477

7578
message = "Could not allocate new file handle. Safety breach."
76-
_logger.error(message)
79+
_LOGGER.error(message)
7780
raise Exception(message)
7881

7982
def add(self, opened_file, fh=None):
@@ -92,7 +95,7 @@ def add(self, opened_file, fh=None):
9295
message = ("Opened-file with file-handle (%d) has already been"
9396
" registered." % (opened_file.fh))
9497

95-
_logger.error(message)
98+
_LOGGER.error(message)
9699
raise Exception(message)
97100

98101
self.__opened[fh] = opened_file
@@ -111,7 +114,7 @@ def remove_by_fh(self, fh):
111114
cls = self.__class__
112115

113116
with cls.__opened_lock:
114-
_logger.debug("Closing opened-file with handle (%d).", fh)
117+
_LOGGER.debug("Closing opened-file with handle (%d).", fh)
115118

116119
file_path = self.__opened[fh].file_path
117120
del self.__opened[fh]
@@ -131,7 +134,7 @@ def remove_by_filepath(self, file_path):
131134

132135
cls = self.__class__
133136

134-
_logger.debug("Removing all open handles for file-path [%s].",
137+
_LOGGER.debug("Removing all open handles for file-path [%s].",
135138
file_path)
136139

137140
count = 0
@@ -144,7 +147,7 @@ def remove_by_filepath(self, file_path):
144147
except KeyError:
145148
pass
146149

147-
_logger.debug("(%d) file-handles removed for file-path [%s].",
150+
_LOGGER.debug("(%d) file-handles removed for file-path [%s].",
148151
count, file_path)
149152

150153
def get_by_fh(self, fh):
@@ -157,11 +160,15 @@ def get_by_fh(self, fh):
157160
message = ("Opened-file with file-handle (%d) is not "
158161
"registered (get_by_fh)." % (fh))
159162

160-
_logger.error(message)
163+
_LOGGER.error(message)
161164
raise Exception(message)
162165

163166
return self.__opened[fh]
164167

168+
@property
169+
def opened_count(self):
170+
return self.__counter
171+
165172
@property
166173
def temp_path(self):
167174
return self.__temp_path
@@ -183,7 +190,7 @@ def __init__(self, entry_id, path, filename, is_hidden, mime_type):
183190

184191
_OPENED_ENTRIES.add(entry_id)
185192

186-
_logger.info("Opened-file object created for entry-ID [%s] and path "
193+
_LOGGER.info("Opened-file object created for entry-ID [%s] and path "
187194
"(%s).", entry_id, path)
188195

189196
self.__entry_id = entry_id
@@ -197,9 +204,12 @@ def __init__(self, entry_id, path, filename, is_hidden, mime_type):
197204
self.__is_loaded = False
198205
self.__is_dirty = False
199206

200-
temp_filename = self.__entry_id.encode('ASCII')
207+
# Use the monotonically incremented `opened_count` to produce a unique
208+
# temporary filepath.
209+
201210
om = get_om()
202-
self.__temp_filepath = os.path.join(om.temp_path, temp_filename)
211+
self.__temp_filepath = \
212+
os.path.join(om.temp_path, str(om.opened_count))
203213

204214
self.__fh = None
205215

@@ -219,7 +229,11 @@ def __del__(self):
219229
"""
220230

221231
if self.__fh is not None:
232+
_LOGGER.debug("Removing temporary file [%s] ([%s]).",
233+
self.__temp_filepath, self.file_path)
234+
222235
self.__fh.close()
236+
os.unlink(self.__temp_filepath)
223237

224238
with _OPENED_ENTRIES_LOCK:
225239
_OPENED_ENTRIES.remove(self.__entry_id)
@@ -251,7 +265,7 @@ def __load_base_from_remote(self):
251265

252266
# If it's loaded and not-changed, don't do anything.
253267
if self.__is_loaded is True and self.__is_dirty is False:
254-
_logger.debug("Not syncing-down non-dirty file.")
268+
_LOGGER.debug("Not syncing-down non-dirty file.")
255269
return
256270

257271
if self.__fh is not None:
@@ -260,13 +274,13 @@ def __load_base_from_remote(self):
260274

261275
entry = self.__cache.get(self.__entry_id)
262276

263-
_logger.debug("Ensuring local availability of [%s]: [%s]",
277+
_LOGGER.debug("Ensuring local availability of [%s]: [%s]",
264278
entry, self.__temp_filepath)
265279

266280
# Get the current version of the write-cache file, or note that we
267281
# don't have it.
268282

269-
_logger.info("Attempting local cache update of file [%s] for entry "
283+
_LOGGER.info("Attempting local cache update of file [%s] for entry "
270284
"[%s] and mime-type [%s].",
271285
self.__temp_filepath, entry, self.mime_type)
272286

@@ -279,7 +293,7 @@ def __load_base_from_remote(self):
279293
self.__fh = open(self.__temp_filepath, 'w+')
280294
self.__fh.write(stub_data)
281295
else:
282-
_logger.debug("Executing the download: [%s] => [%s]",
296+
_LOGGER.debug("Executing the download: [%s] => [%s]",
283297
entry.id, self.__temp_filepath)
284298

285299
try:
@@ -298,22 +312,22 @@ def __load_base_from_remote(self):
298312

299313
(length, cache_fault) = result
300314
except ExportFormatError:
301-
_logger.exception("There was an export-format error.")
315+
_LOGGER.exception("There was an export-format error.")
302316
raise fuse.FuseOSError(ENOENT)
303317

304318
self.__fh = open(self.__temp_filepath, 'r+')
305319

306320
self.__is_dirty = False
307321
self.__is_loaded = True
308322

309-
_logger.debug("Established base file-data for [%s]: [%s]",
323+
_LOGGER.debug("Established base file-data for [%s]: [%s]",
310324
entry, self.__temp_filepath)
311325

312326
@dec_hint(['offset', 'data'], ['data'], 'OF')
313327
def add_update(self, offset, data):
314328
"""Queue an update to this file."""
315329

316-
_logger.debug("Applying update for offset (%d) and length (%d).",
330+
_LOGGER.debug("Applying update for offset (%d) and length (%d).",
317331
offset, len(data))
318332

319333
self.__is_dirty = True
@@ -325,12 +339,12 @@ def add_update(self, offset, data):
325339
def flush(self):
326340
"""The OS wants to effect any changes made to the file."""
327341

328-
_logger.debug("Flushing opened-file.")
342+
_LOGGER.debug("Flushing opened-file.")
329343

330344
entry = self.__cache.get(self.__entry_id)
331345

332346
if self.__is_dirty is False:
333-
_logger.debug("Flush will be skipped for [%s] because there "
347+
_LOGGER.debug("Flush will be skipped for [%s] because there "
334348
"are no changes: [%s] IS_LOADED=[%s] "
335349
"IS_DIRTY=[%d]",
336350
entry.id, self.file_path, self.__is_loaded,
@@ -339,7 +353,7 @@ def flush(self):
339353
else:
340354
st = os.stat(self.__temp_filepath)
341355

342-
_logger.debug("Pushing (%d) bytes for entry with ID from [%s] to "
356+
_LOGGER.debug("Pushing (%d) bytes for entry with ID from [%s] to "
343357
"GD for file-path [%s].",
344358
st.st_size, entry.id, self.__temp_filepath)
345359

@@ -360,17 +374,17 @@ def flush(self):
360374

361375
# Immediately update our current cached entry.
362376

363-
_logger.debug("Update successful. Updating local cache.")
377+
_LOGGER.debug("Update successful. Updating local cache.")
364378

365379
path_relations = PathRelations.get_instance()
366380
path_relations.register_entry(entry)
367381

368-
_logger.info("Update complete on entry with ID [%s].", entry.id)
382+
_LOGGER.info("Update complete on entry with ID [%s].", entry.id)
369383

370384
@dec_hint(['offset', 'length'], prefix='OF')
371385
def read(self, offset, length):
372386

373-
_logger.debug("Reading (%d) bytes at offset (%d).", length, offset)
387+
_LOGGER.debug("Reading (%d) bytes at offset (%d).", length, offset)
374388

375389
# We don't care if the cache file is dirty (not on this system, at
376390
# least).
@@ -382,11 +396,11 @@ def read(self, offset, length):
382396

383397
len_ = len(data)
384398

385-
_logger.debug("(%d) bytes retrieved from slice (%d):(%d)/(%d).",
399+
_LOGGER.debug("(%d) bytes retrieved from slice (%d):(%d)/(%d).",
386400
len_, offset, length, st.st_size)
387401

388402
if len_ != length:
389-
_logger.warning("Read request is only returning (%d) bytes when "
403+
_LOGGER.warning("Read request is only returning (%d) bytes when "
390404
"(%d) bytes were requested.", len_, length)
391405

392406
return data
@@ -411,14 +425,14 @@ def create_for_existing_filepath(filepath):
411425
the information.
412426
"""
413427

414-
_logger.debug("Creating OpenedFile for [%s].", filepath)
428+
_LOGGER.debug("Creating OpenedFile for [%s].", filepath)
415429

416430
# Process/distill the requested file-path.
417431

418432
try:
419433
result = split_path(filepath, path_resolver)
420434
except GdNotFoundError:
421-
_logger.exception("Could not process [%s] (create_for_requested).",
435+
_LOGGER.exception("Could not process [%s] (create_for_requested).",
422436
filepath)
423437

424438
raise fuse.FuseOSError(ENOENT)
@@ -434,13 +448,13 @@ def create_for_existing_filepath(filepath):
434448
entry_clause = path_relations.get_clause_from_path(
435449
distilled_filepath)
436450
except:
437-
_logger.exception("Could not try to get clause from path [%s] "
451+
_LOGGER.exception("Could not try to get clause from path [%s] "
438452
"(OpenedFile).", distilled_filepath)
439453

440454
raise fuse.FuseOSError(EIO)
441455

442456
if not entry_clause:
443-
_logger.debug("Path [%s] does not exist for stat().", path)
457+
_LOGGER.debug("Path [%s] does not exist for stat().", path)
444458
raise fuse.FuseOSError(ENOENT)
445459

446460
entry = entry_clause[CLAUSE_ENTRY]
@@ -453,18 +467,18 @@ def create_for_existing_filepath(filepath):
453467
try:
454468
final_mimetype = entry.normalize_download_mimetype(mime_type)
455469
except ExportFormatError:
456-
_logger.exception("There was an export-format error "
470+
_LOGGER.exception("There was an export-format error "
457471
"(create_for_requested_filesystem).")
458472

459473
raise fuse.FuseOSError(ENOENT)
460474
except:
461-
_logger.exception("Could not normalize mime-type [%s] for entry"
475+
_LOGGER.exception("Could not normalize mime-type [%s] for entry"
462476
"[%s].", mime_type, entry)
463477

464478
raise fuse.FuseOSError(EIO)
465479

466480
if final_mimetype != mime_type:
467-
_logger.info("Entry being opened will be opened as [%s] rather "
481+
_LOGGER.info("Entry being opened will be opened as [%s] rather "
468482
"than [%s].", final_mimetype, mime_type)
469483

470484
# Build the object.

gdrivefs/gdtool/drive.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -273,8 +273,8 @@ def get_children_under_parent_id(self,
273273
client = self.__auth.get_client()
274274

275275
assert \
276-
(query_contains_string is not None and
277-
query_is_string is not None) is False,
276+
(query_contains_string is not None and \
277+
query_is_string is not None) is False, \
278278
"The query_contains_string and query_is_string parameters are "\
279279
"mutually exclusive."
280280

@@ -458,12 +458,13 @@ def download_to_local(self, output_file_path, normalized_entry, mime_type,
458458
_logger.debug("Read chunk: STATUS=[%s] DONE=[%s] "
459459
"TOTAL_SIZE=[%s]", status, done, total_size)
460460

461+
p = status.progress()
462+
461463
_logger.debug("Chunk: PROGRESS=[%s] TOTAL-SIZE=[%s] "
462464
"RESUMABLE-PROGRESS=[%s]",
463-
status.progress, status.total_size,
464-
status.resumable_progress)
465+
p, status.total_size, status.resumable_progress)
465466

466-
percent = status.progress() if status.total_size > 0 else 100.0
467+
percent = p if status.total_size > 0 else 100.0
467468
# TODO(dustin): This just places an arbitrary limit on the number of empty
468469
# chunks we can receive. Can we drop this to 1?
469470
if len(progresses) >= _MAX_EMPTY_CHUNKS:

0 commit comments

Comments
 (0)