Commit 353a579

Fix type hint problems for resource clients (#155)
1 parent cbed2b7 commit 353a579

5 files changed: +82 -78 lines changed

CHANGELOG.md

Lines changed: 3 additions & 1 deletion

```diff
@@ -2,7 +2,9 @@
 
 ## [1.4.1](../../releases/tag/v1.4.1) - Unreleased
 
-...
+### Internal changes
+
+- Fix type hint problems for resource clients
 
 ## [1.4.0](../../releases/tag/v1.4.0) - 2023-12-05
 
```

src/apify/_memory_storage/resource_clients/base_resource_client.py

Lines changed: 8 additions & 6 deletions

```diff
@@ -8,6 +8,8 @@
 from apify_shared.utils import ignore_docs
 
 if TYPE_CHECKING:
+    from typing_extensions import Self
+
     from ..memory_storage_client import MemoryStorageClient
 
 
@@ -48,9 +50,9 @@ def _get_storages_dir(cls: type[BaseResourceClient], memory_storage_client: MemoryStorageClient
     @classmethod
     @abstractmethod
     def _get_storage_client_cache(
-        cls: type[BaseResourceClient],
+        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
         memory_storage_client: MemoryStorageClient,
-    ) -> list[BaseResourceClient]:
+    ) -> list[Self]:
         raise NotImplementedError('You must override this method in the subclass!')
 
     @abstractmethod
@@ -60,21 +62,21 @@ def _to_resource_info(self: BaseResourceClient) -> dict:
     @classmethod
     @abstractmethod
     def _create_from_directory(
-        cls: type[BaseResourceClient],
+        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
         storage_directory: str,
         memory_storage_client: MemoryStorageClient,
         id: str | None = None,  # noqa: A002
         name: str | None = None,
-    ) -> BaseResourceClient:
+    ) -> Self:
         raise NotImplementedError('You must override this method in the subclass!')
 
     @classmethod
     def _find_or_create_client_by_id_or_name(
-        cls: type[BaseResourceClient],
+        cls,  # noqa: ANN102 # type annotated cls does not work with Self as a return type
        memory_storage_client: MemoryStorageClient,
         id: str | None = None,  # noqa: A002
         name: str | None = None,
-    ) -> BaseResourceClient | None:
+    ) -> Self | None:
         assert id is not None or name is not None  # noqa: S101
 
         storage_client_cache = cls._get_storage_client_cache(memory_storage_client)
```
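Note (not part of the diff; class names below are simplified and hypothetical): the pattern adopted above annotates the abstract classmethods' return type as `Self` instead of `BaseResourceClient`, so a type checker binds the return type to whichever subclass the method is called on. A minimal sketch of that behaviour, assuming `typing_extensions` is available to the type checker:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # As in the diff above, Self is only needed by the type checker,
    # so the import can stay behind the TYPE_CHECKING guard.
    from typing_extensions import Self


class BaseClient:
    """Hypothetical stand-in for BaseResourceClient."""

    @classmethod
    def _create_from_directory(cls) -> Self:  # noqa: ANN102
        # Annotating `cls: type[BaseClient]` would pin the return type to the
        # base class; leaving it unannotated keeps Self bound to the subclass
        # the method is called on.
        return cls()


class DatasetLikeClient(BaseClient):
    """Hypothetical subclass with a member the base class lacks."""

    def list_items(self) -> list[dict]:
        return []


# A type checker infers DatasetLikeClient here, not BaseClient, so
# subclass-only members type-check without `# type: ignore`.
client = DatasetLikeClient._create_from_directory()
client.list_items()
```

The `# noqa: ANN102` mirrors the diff: the flake8-annotations rule would otherwise ask for an annotation on `cls`, but an annotated `cls` would defeat the `Self` binding.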

src/apify/_memory_storage/resource_clients/dataset.py

Lines changed: 17 additions & 17 deletions

```diff
@@ -74,8 +74,8 @@ async def get(self: DatasetClient) -> dict | None:
         found = self._find_or_create_client_by_id_or_name(memory_storage_client=self._memory_storage_client, id=self._id, name=self._name)
 
         if found:
-            async with found._file_operation_lock:  # type: ignore
-                await found._update_timestamps(has_been_modified=False)  # type: ignore
+            async with found._file_operation_lock:
+                await found._update_timestamps(has_been_modified=False)
                 return found._to_resource_info()
 
         return None
@@ -103,7 +103,7 @@ async def update(self: DatasetClient, *, name: str | None = None) -> dict:
         if name is None:
             return existing_dataset_by_id._to_resource_info()
 
-        async with existing_dataset_by_id._file_operation_lock:  # type: ignore
+        async with existing_dataset_by_id._file_operation_lock:
             # Check that name is not in use already
             existing_dataset_by_name = next(
                 (dataset for dataset in self._memory_storage_client._datasets_handled if dataset._name and dataset._name.lower() == name.lower()),
@@ -122,7 +122,7 @@ async def update(self: DatasetClient, *, name: str | None = None) -> dict:
             await force_rename(previous_dir, existing_dataset_by_id._resource_directory)
 
             # Update timestamps
-            await existing_dataset_by_id._update_timestamps(has_been_modified=True)  # type: ignore
+            await existing_dataset_by_id._update_timestamps(has_been_modified=True)
 
         return existing_dataset_by_id._to_resource_info()
 
@@ -193,19 +193,19 @@ async def list_items(
         if existing_dataset_by_id is None:
             raise_on_non_existing_storage(StorageTypes.DATASET, self._id)
 
-        async with existing_dataset_by_id._file_operation_lock:  # type: ignore
-            start, end = existing_dataset_by_id._get_start_and_end_indexes(  # type: ignore
-                max(existing_dataset_by_id._item_count - (offset or 0) - (limit or LIST_ITEMS_LIMIT), 0) if desc else offset or 0,  # type: ignore
+        async with existing_dataset_by_id._file_operation_lock:
+            start, end = existing_dataset_by_id._get_start_and_end_indexes(
+                max(existing_dataset_by_id._item_count - (offset or 0) - (limit or LIST_ITEMS_LIMIT), 0) if desc else offset or 0,
                 limit,
             )
 
             items = []
 
             for idx in range(start, end):
                 entry_number = self._generate_local_entry_name(idx)
-                items.append(existing_dataset_by_id._dataset_entries[entry_number])  # type: ignore
+                items.append(existing_dataset_by_id._dataset_entries[entry_number])
 
-            await existing_dataset_by_id._update_timestamps(has_been_modified=False)  # type: ignore
+            await existing_dataset_by_id._update_timestamps(has_been_modified=False)
 
             if desc:
                 items.reverse()
@@ -217,7 +217,7 @@ async def list_items(
                     'items': items,
                     'limit': limit or LIST_ITEMS_LIMIT,
                     'offset': offset or 0,
-                    'total': existing_dataset_by_id._item_count,  # type: ignore
+                    'total': existing_dataset_by_id._item_count,
                 }
             )
 
@@ -308,16 +308,16 @@ async def push_items(self: DatasetClient, items: JSONSerializable) -> None:
 
         added_ids: list[str] = []
         for entry in normalized:
-            existing_dataset_by_id._item_count += 1  # type: ignore
-            idx = self._generate_local_entry_name(existing_dataset_by_id._item_count)  # type: ignore
+            existing_dataset_by_id._item_count += 1
+            idx = self._generate_local_entry_name(existing_dataset_by_id._item_count)
 
-            existing_dataset_by_id._dataset_entries[idx] = entry  # type: ignore
+            existing_dataset_by_id._dataset_entries[idx] = entry
             added_ids.append(idx)
 
-        data_entries = [(id, existing_dataset_by_id._dataset_entries[id]) for id in added_ids]  # type: ignore # noqa: A001
+        data_entries = [(id, existing_dataset_by_id._dataset_entries[id]) for id in added_ids]  # noqa: A001
 
-        async with existing_dataset_by_id._file_operation_lock:  # type: ignore
-            await existing_dataset_by_id._update_timestamps(has_been_modified=True)  # type: ignore
+        async with existing_dataset_by_id._file_operation_lock:
+            await existing_dataset_by_id._update_timestamps(has_been_modified=True)
 
             await _update_dataset_items(
                 data=data_entries,
@@ -385,7 +385,7 @@ def _get_storages_dir(cls: type[DatasetClient], memory_storage_client: MemoryStorageClient
         return memory_storage_client._datasets_directory
 
     @classmethod
-    def _get_storage_client_cache(  # type: ignore
+    def _get_storage_client_cache(
         cls: type[DatasetClient],
         memory_storage_client: MemoryStorageClient,
     ) -> list[DatasetClient]:
```
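A hedged illustration of the call-site effect seen above (hypothetical, simplified names, not the SDK's real classes): because the base lookup helper now returns `Self | None`, the checker narrows `found` to the concrete client class, whose private members it already knows about, which is why the `# type: ignore` comments could be dropped.

```python
from __future__ import annotations

import asyncio
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import Self


class BaseClient:
    @classmethod
    def _find_by_name(cls, name: str | None) -> Self | None:  # noqa: ANN102
        # Hypothetical stand-in for _find_or_create_client_by_id_or_name().
        return cls() if name else None


class DatasetLikeClient(BaseClient):
    def __init__(self) -> None:
        self._file_operation_lock = asyncio.Lock()
        self._item_count = 0


async def main() -> None:
    found = DatasetLikeClient._find_by_name('default')
    if found:
        # `found` is narrowed to DatasetLikeClient, so accessing its private
        # members no longer needs `# type: ignore`, as in the diff above.
        async with found._file_operation_lock:
            found._item_count += 1


asyncio.run(main())
```

Before the change, `found` was typed as the base class, which does not declare attributes like `_file_operation_lock`, hence the ignores.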

src/apify/_memory_storage/resource_clients/key_value_store.py

Lines changed: 22 additions & 22 deletions

```diff
@@ -100,8 +100,8 @@ async def get(self: KeyValueStoreClient) -> dict | None:
         found = self._find_or_create_client_by_id_or_name(memory_storage_client=self._memory_storage_client, id=self._id, name=self._name)
 
         if found:
-            async with found._file_operation_lock:  # type: ignore
-                await found._update_timestamps(has_been_modified=False)  # type: ignore
+            async with found._file_operation_lock:
+                await found._update_timestamps(has_been_modified=False)
                 return found._to_resource_info()
 
         return None
@@ -127,7 +127,7 @@ async def update(self: KeyValueStoreClient, *, name: str | None = None) -> dict:
         if name is None:
             return existing_store_by_id._to_resource_info()
 
-        async with existing_store_by_id._file_operation_lock:  # type: ignore
+        async with existing_store_by_id._file_operation_lock:
             # Check that name is not in use already
             existing_store_by_name = next(
                 (store for store in self._memory_storage_client._key_value_stores_handled if store._name and store._name.lower() == name.lower()),
@@ -146,7 +146,7 @@ async def update(self: KeyValueStoreClient, *, name: str | None = None) -> dict:
             await force_rename(previous_dir, existing_store_by_id._resource_directory)
 
             # Update timestamps
-            await existing_store_by_id._update_timestamps(has_been_modified=True)  # type: ignore
+            await existing_store_by_id._update_timestamps(has_been_modified=True)
 
         return existing_store_by_id._to_resource_info()
 
@@ -187,7 +187,7 @@ async def list_keys(
 
         items = []
 
-        for record in existing_store_by_id._records.values():  # type: ignore
+        for record in existing_store_by_id._records.values():
             size = len(record['value'])
             items.append(
                 {
@@ -222,8 +222,8 @@ async def list_keys(
         is_last_selected_item_absolutely_last = last_item_in_store == last_selected_item
         next_exclusive_start_key = None if is_last_selected_item_absolutely_last else last_selected_item['key']
 
-        async with existing_store_by_id._file_operation_lock:  # type: ignore
-            await existing_store_by_id._update_timestamps(has_been_modified=False)  # type: ignore
+        async with existing_store_by_id._file_operation_lock:
+            await existing_store_by_id._update_timestamps(has_been_modified=False)
 
         return {
             'count': len(items),
@@ -247,7 +247,7 @@ async def _get_record_internal(
         if existing_store_by_id is None:
             raise_on_non_existing_storage(StorageTypes.KEY_VALUE_STORE, self._id)
 
-        stored_record = existing_store_by_id._records.get(key)  # type: ignore
+        stored_record = existing_store_by_id._records.get(key)
 
         if stored_record is None:
             return None
@@ -264,8 +264,8 @@ async def _get_record_internal(
         except ValueError:
             logger.exception('Error parsing key-value store record')
 
-        async with existing_store_by_id._file_operation_lock:  # type: ignore
-            await existing_store_by_id._update_timestamps(has_been_modified=False)  # type: ignore
+        async with existing_store_by_id._file_operation_lock:
+            await existing_store_by_id._update_timestamps(has_been_modified=False)
 
         return record
 
@@ -324,22 +324,22 @@ async def set_record(self: KeyValueStoreClient, key: str, value: Any, content_type
         if 'application/json' in content_type and not is_file_or_bytes(value) and not isinstance(value, str):
             value = json_dumps(value).encode('utf-8')
 
-        async with existing_store_by_id._file_operation_lock:  # type: ignore
-            await existing_store_by_id._update_timestamps(has_been_modified=True)  # type: ignore
+        async with existing_store_by_id._file_operation_lock:
+            await existing_store_by_id._update_timestamps(has_been_modified=True)
             record: KeyValueStoreRecord = {
                 'key': key,
                 'value': value,
                 'contentType': content_type,
             }
 
-            old_record = existing_store_by_id._records.get(key)  # type: ignore
-            existing_store_by_id._records[key] = record  # type: ignore
+            old_record = existing_store_by_id._records.get(key)
+            existing_store_by_id._records[key] = record
 
             if self._memory_storage_client._persist_storage:
                 if old_record is not None and _filename_from_record(old_record) != _filename_from_record(record):
-                    await existing_store_by_id._delete_persisted_record(old_record)  # type: ignore
+                    await existing_store_by_id._delete_persisted_record(old_record)
 
-                await existing_store_by_id._persist_record(record)  # type: ignore
+                await existing_store_by_id._persist_record(record)
 
     async def _persist_record(self: KeyValueStoreClient, record: KeyValueStoreRecord) -> None:
         store_directory = self._resource_directory
@@ -385,14 +385,14 @@ async def delete_record(self: KeyValueStoreClient, key: str) -> None:
         if existing_store_by_id is None:
             raise_on_non_existing_storage(StorageTypes.KEY_VALUE_STORE, self._id)
 
-        record = existing_store_by_id._records.get(key)  # type: ignore
+        record = existing_store_by_id._records.get(key)
 
         if record is not None:
-            async with existing_store_by_id._file_operation_lock:  # type: ignore
-                del existing_store_by_id._records[key]  # type: ignore
-                await existing_store_by_id._update_timestamps(has_been_modified=True)  # type: ignore
+            async with existing_store_by_id._file_operation_lock:
+                del existing_store_by_id._records[key]
+                await existing_store_by_id._update_timestamps(has_been_modified=True)
                 if self._memory_storage_client._persist_storage:
-                    await existing_store_by_id._delete_persisted_record(record)  # type: ignore
+                    await existing_store_by_id._delete_persisted_record(record)
 
     async def _delete_persisted_record(self: KeyValueStoreClient, record: KeyValueStoreRecord) -> None:
         store_directory = self._resource_directory
@@ -437,7 +437,7 @@ def _get_storages_dir(cls: type[KeyValueStoreClient], memory_storage_client: MemoryStorageClient
         return memory_storage_client._key_value_stores_directory
 
     @classmethod
-    def _get_storage_client_cache(  # type: ignore
+    def _get_storage_client_cache(
         cls: type[KeyValueStoreClient],
         memory_storage_client: MemoryStorageClient,
     ) -> list[KeyValueStoreClient]:
```
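Finally, a hedged sketch (hypothetical, simplified signatures) of why the subclass overrides of `_get_storage_client_cache` could drop their `# type: ignore`: with the base method declared to return `list[Self]`, an override on a concrete client that returns a list of that client is a compatible specialization rather than a signature mismatch.

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing_extensions import Self


class BaseClient:
    @classmethod
    def _get_storage_client_cache(cls) -> list[Self]:  # noqa: ANN102
        raise NotImplementedError('You must override this method in the subclass!')


class KeyValueStoreLikeClient(BaseClient):
    # Hypothetical module-level cache of handled clients.
    _cache: list[KeyValueStoreLikeClient] = []

    @classmethod
    def _get_storage_client_cache(cls) -> list[KeyValueStoreLikeClient]:
        # list[KeyValueStoreLikeClient] matches list[Self] once Self is bound
        # to this subclass, so the override type-checks without an ignore.
        return cls._cache
```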
