diff --git a/CITATION.cff b/CITATION.cff
index ef70e38..4783c43 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -9,11 +9,11 @@ identifiers:
value: 10.5281/zenodo.10799219
description: This is the collection of archived snapshots of all versions of thread.
- type: url
- value: https://github.com/python-thread/thread/releases/tag/v1.1.1
- description: The GitHub release URL of tag v1.1.1.
+ value: https://github.com/python-thread/thread/releases/tag/v2.0.0
+ description: The GitHub release URL of tag v2.0.0.
- type: url
- value: https://pypi.org/project/thread/1.1.1
- description: The PyPI release URL of tag v1.1.1.
+ value: https://pypi.org/project/thread/2.0.0
+ description: The PyPI release URL of tag v2.0.0.
cff-version: 1.2.0
date-released: 2024-03-07
keywords:
@@ -32,6 +32,6 @@ repository-code: https://github.com/python-thread/thread
repository-artifact: https://pypi.org/project/thread
title: thread
type: software
-version: 1.1.1
+version: 2.0.0
url: https://thread.ngjx.org
diff --git a/README.md b/README.md
index e130d64..48483d0 100644
--- a/README.md
+++ b/README.md
@@ -79,7 +79,7 @@ _Below is an example of how you can install and use thread._
2. Import thread into your library!
```py
import thread
- from thread import Thread, ParallelProcessing
+ from thread import Thread, ConcurrentProcessing
```
(back to top)
@@ -98,7 +98,7 @@ Our docs are [here!](https://thread.ngjx.org)
## Roadmap
-- [x] v1.1.1 Release
+- [x] v2.0.0 Release
- [ ] Bug fixes
- [ ] New features
- [ ] Testing
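
For context on the renamed import, here is a minimal end-to-end sketch against the v2.0.0 API surface exercised by the test suite; the `square` worker is a hypothetical stand-in, and the constructor, `start()`, `join()`, and `get_return_values()` calls mirror `tests/test_concurrentprocessing.py`.

```py
from thread import ConcurrentProcessing

def square(x: int) -> int:  # hypothetical worker, called once per dataset item
    return x * x

# Same shape as the tests: function + dataset, optional daemon flag
job = ConcurrentProcessing(function=square, dataset=list(range(10)), daemon=True)
job.start()
job.join()                       # returns None as of v2.0.0
print(job.get_return_values())   # [0, 1, 4, 9, ...]
```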
diff --git a/pyproject.toml b/pyproject.toml
index 6e679dd..d4b5930 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "thread"
-version = "1.1.1"
+version = "2.0.0"
description = "Threading module extension"
authors = ["Alex "]
license = "BSD-3-Clause"
@@ -30,7 +30,7 @@ classifiers = [
[tool.poetry.urls]
Homepage = "https://thread.ngjx.org"
-Documentation = "https://thread.ngjx.org/docs/v1.1.1"
+Documentation = "https://thread.ngjx.org/docs/v2.0.0"
Source = "https://github.com/python-thread/thread"
Download = "https://pypi.org/project/thread/#files"
"Release Notes" = "https://github.com/python-thread/thread/releases"
diff --git a/src/thread/__init__.py b/src/thread/__init__.py
index 2d7ac9a..1ab7af1 100644
--- a/src/thread/__init__.py
+++ b/src/thread/__init__.py
@@ -1,11 +1,11 @@
"""
## Thread Library
-Documentation at https://thread.ngjx.org/docs/v1.1.1
+Documentation at https://thread.ngjx.org/docs/v2.0.0
---
-Released under the GPG-3 License
+Released under the BSD-3-Clause License
Copyright (c) thread.ngjx.org, All rights reserved
"""
@@ -18,11 +18,11 @@
"""
-__version__ = '1.1.1'
+__version__ = '2.0.0'
# Export Core
-from .thread import Thread, ParallelProcessing
+from .thread import Thread, ConcurrentProcessing
from . import _types as types, exceptions
@@ -39,7 +39,7 @@
# Wildcard Export
__all__ = [
'Thread',
- 'ParallelProcessing',
+ 'ConcurrentProcessing',
'threaded',
'processor',
'types',
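
Since `ParallelProcessing` is removed from the exports above rather than aliased (no compatibility alias is visible in this diff), downstream imports of the old name will fail at import time. A quick sanity check, with that absence stated as an assumption:

```py
import thread

assert thread.__version__ == '2.0.0'
assert 'ConcurrentProcessing' in thread.__all__
# Assumption: no backwards-compatibility alias is shipped for the old name.
assert not hasattr(thread, 'ParallelProcessing')
```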
diff --git a/src/thread/_types.py b/src/thread/_types.py
index 98e0c8d..8642708 100644
--- a/src/thread/_types.py
+++ b/src/thread/_types.py
@@ -1,7 +1,7 @@
"""
## Types
-Documentation: https://thread.ngjx.org/docs/v1.1.1
+Documentation: https://thread.ngjx.org/docs/v2.0.0
"""
from typing import Any, Literal, Callable, Union, Sized
diff --git a/src/thread/decorators/_processor.py b/src/thread/decorators/_processor.py
index 0131d69..50dabc0 100644
--- a/src/thread/decorators/_processor.py
+++ b/src/thread/decorators/_processor.py
@@ -1,11 +1,11 @@
"""
## Processor
-Documentation: https://thread.ngjx.org/docs/v1.1.1
+Documentation: https://thread.ngjx.org/docs/v2.0.0
"""
from functools import wraps
-from ..thread import ParallelProcessing
+from ..thread import ConcurrentProcessing
from .._types import (
Overflow_In,
@@ -29,7 +29,7 @@
NoParamReturn = Callable[
Concatenate[Dataset[_DataT], _TargetP],
- ParallelProcessing[_TargetP, _TargetT, _DataT],
+ ConcurrentProcessing[_TargetP, _TargetT, _DataT],
]
WithParamReturn = Callable[
[TargetFunction[_DataT, _TargetP, _TargetT]],
@@ -37,14 +37,15 @@
]
FullParamReturn = Callable[
Concatenate[Dataset[_DataT], _TargetP],
- ParallelProcessing[_TargetP, _TargetT, _DataT],
+ ConcurrentProcessing[_TargetP, _TargetT, _DataT],
]
@overload
def processor(
__function: TargetFunction[_DataT, _TargetP, _TargetT],
-) -> NoParamReturn[_DataT, _TargetP, _TargetT]: ...
+) -> NoParamReturn[_DataT, _TargetP, _TargetT]:
+ ...
@overload
@@ -55,7 +56,8 @@ def processor(
ignore_errors: Sequence[type[Exception]] = (),
suppress_errors: bool = False,
**overflow_kwargs: Overflow_In,
-) -> WithParamReturn[_DataT, _TargetP, _TargetT]: ...
+) -> WithParamReturn[_DataT, _TargetP, _TargetT]:
+ ...
@overload
@@ -67,7 +69,8 @@ def processor(
ignore_errors: Sequence[type[Exception]] = (),
suppress_errors: bool = False,
**overflow_kwargs: Overflow_In,
-) -> FullParamReturn[_DataT, _TargetP, _TargetT]: ...
+) -> FullParamReturn[_DataT, _TargetP, _TargetT]:
+ ...
def processor(
@@ -150,7 +153,7 @@ def wrapped(
data: Dataset[_DataT],
*parsed_args: _TargetP.args,
**parsed_kwargs: _TargetP.kwargs,
- ) -> ParallelProcessing[_TargetP, _TargetT, _DataT]:
+ ) -> ConcurrentProcessing[_TargetP, _TargetT, _DataT]:
kwargs.update(parsed_kwargs)
processed_args = (*args, *parsed_args)
@@ -158,7 +161,7 @@ def wrapped(
i: v for i, v in kwargs.items() if i not in ['args', 'kwargs']
}
- job = ParallelProcessing(
+ job = ConcurrentProcessing(
function=__function,
dataset=data,
args=processed_args,
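
Per the return-type aliases above, a function decorated with `processor` now yields a `ConcurrentProcessing` job when called with a dataset. A small sketch restating just that contract; `double` is a hypothetical target, and nothing beyond the annotated return type is assumed:

```py
from thread import processor, ConcurrentProcessing

@processor
def double(item: int) -> int:  # hypothetical target: receives one dataset item per call
    return item * 2

job = double([1, 2, 3, 4])     # returns a ConcurrentProcessing job, not the results
assert isinstance(job, ConcurrentProcessing)
```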
diff --git a/src/thread/decorators/_threaded.py b/src/thread/decorators/_threaded.py
index 4db6ef1..8c04ee0 100644
--- a/src/thread/decorators/_threaded.py
+++ b/src/thread/decorators/_threaded.py
@@ -1,7 +1,7 @@
"""
## Threaded
-Documentation: https://thread.ngjx.org/docs/v1.1.1
+Documentation: https://thread.ngjx.org/docs/v2.0.0
"""
from functools import wraps
@@ -23,7 +23,8 @@
@overload
-def threaded(__function: TargetFunction[P, T]) -> NoParamReturn[P, T]: ...
+def threaded(__function: TargetFunction[P, T]) -> NoParamReturn[P, T]:
+ ...
@overload
@@ -34,7 +35,8 @@ def threaded(
ignore_errors: Sequence[type[Exception]] = (),
suppress_errors: bool = False,
**overflow_kwargs: Overflow_In,
-) -> WithParamReturn[P, T]: ...
+) -> WithParamReturn[P, T]:
+ ...
@overload
@@ -46,7 +48,8 @@ def threaded(
ignore_errors: Sequence[type[Exception]] = (),
suppress_errors: bool = False,
**overflow_kwargs: Overflow_In,
-) -> FullParamReturn[P, T]: ...
+) -> FullParamReturn[P, T]:
+ ...
def threaded(
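
By analogy with `_processor.py`, calling a `threaded`-decorated function is expected to return a `Thread` job; treat that as an assumption, since the `NoParamReturn` alias for `threaded` is not shown in this hunk.

```py
from thread import threaded, Thread

@threaded
def increment(x: int) -> int:  # hypothetical target function
    return x + 1

worker = increment(41)         # a Thread job rather than the value 42 (assumed per the overloads)
assert isinstance(worker, Thread)
```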
diff --git a/src/thread/exceptions.py b/src/thread/exceptions.py
index 89ee6ae..a44160a 100644
--- a/src/thread/exceptions.py
+++ b/src/thread/exceptions.py
@@ -1,7 +1,7 @@
"""
## Thread Exceptions
-Documentation: https://thread.ngjx.org/docs/v1.1.1
+Documentation: https://thread.ngjx.org/docs/v2.0.0
"""
import traceback
diff --git a/src/thread/thread.py b/src/thread/thread.py
index 893991f..0aa7590 100644
--- a/src/thread/thread.py
+++ b/src/thread/thread.py
@@ -5,10 +5,10 @@
class Thread: ...
-class ParallelProcessing: ...
+class ConcurrentProcessing: ...
```
-Documentation: https://thread.ngjx.org/docs/v1.1.1
+Documentation: https://thread.ngjx.org/docs/v2.0.0
"""
import sys
@@ -230,7 +230,7 @@ def add_hook(self, hook: HookFunction[_Target_T]) -> None:
"""
self.hooks.append(hook)
- def join(self, timeout: Optional[float] = None) -> bool:
+ def join(self, timeout: Optional[float] = None) -> None:
"""
Halts the current thread execution until a thread completes or exceeds the timeout
@@ -238,10 +238,6 @@ def join(self, timeout: Optional[float] = None) -> bool:
----------
:param timeout: The maximum time allowed to halt the thread
- Returns
- -------
- :returns bool: True if the thread is no-longer alive
-
Raises
------
ThreadNotInitializedError: If the thread is not initialized
@@ -255,7 +251,6 @@ def join(self, timeout: Optional[float] = None) -> bool:
super().join(timeout)
self._handle_exceptions()
- return not self.is_alive()
def get_return_value(self) -> _Target_T:
"""
@@ -341,9 +336,9 @@ def __init__(self, thread: Thread, progress: float = 0) -> None:
self.progress = progress
-class ParallelProcessing(Generic[_Target_P, _Target_T, _Dataset_T]):
+class ConcurrentProcessing(Generic[_Target_P, _Target_T, _Dataset_T]):
"""
- Multi-Threaded Parallel Processing
+ Concurrent Processing
---------------------------------------
Type-Safe and provides more functionality on top
@@ -378,7 +373,8 @@ def __init__(
_get_value: Optional[Callable[[LengthandGetLike_T, int], _Dataset_T]] = None,
_length: Optional[Union[int, Callable[[Any], int]]] = None,
**overflow_kwargs: Overflow_In,
- ) -> None: ...
+ ) -> None:
+ ...
# Has __len__, require _get_value to be set
@overload
@@ -391,7 +387,8 @@ def __init__(
_get_value: Callable[[LengthLike_T, int], _Dataset_T],
_length: Optional[Union[int, Callable[[Any], int]]] = None,
**overflow_kwargs: Overflow_In,
- ) -> None: ...
+ ) -> None:
+ ...
# Has __getitem__, require _length to be set
@overload
@@ -404,7 +401,8 @@ def __init__(
_get_value: Optional[Callable[[GetLike_T, int], _Dataset_T]] = None,
_length: Union[int, Callable[[GetLike_T], int]],
**overflow_kwargs: Overflow_In,
- ) -> None: ...
+ ) -> None:
+ ...
# Does not support __getitem__ and __len__
@overload
@@ -417,7 +415,8 @@ def __init__(
_get_value: Callable[[Any, int], _Dataset_T],
_length: Union[int, Callable[[Any], int]],
**overflow_kwargs: Overflow_In,
- ) -> None: ...
+ ) -> None:
+ ...
def __init__(
self,
@@ -442,10 +441,10 @@ def __init__(
**overflow_kwargs: Overflow_In,
) -> None:
"""
- Initializes a new Multi-Threaded Pool\n
+ Initializes a new Concurrent Process\n
Best for data processing
- Splits a dataset as evenly as it can among the threads and run them in parallel
+ Splits a dataset as evenly as it can among the threads and runs them concurrently
Parameters
----------
@@ -598,14 +597,10 @@ def get_return_values(self) -> List[_Dataset_T]:
results += entry.thread.result
return results
- def join(self) -> bool:
+ def join(self) -> None:
"""
Halts the current thread execution until a thread completes or exceeds the timeout
- Returns
- -------
- :returns bool: True if the thread is no-longer alive
-
Raises
------
ThreadNotInitializedError: If the thread is not initialized
@@ -619,7 +614,6 @@ def join(self) -> bool:
for entry in self._threads:
entry.thread.join()
- return True
def kill(self) -> None:
"""
diff --git a/tests/test_parallelprocessing.py b/tests/test_concurrentprocessing.py
similarity index 89%
rename from tests/test_parallelprocessing.py
rename to tests/test_concurrentprocessing.py
index 62c729d..eba5ea6 100644
--- a/tests/test_parallelprocessing.py
+++ b/tests/test_concurrentprocessing.py
@@ -1,6 +1,6 @@
import time
import pytest
-from src.thread import ParallelProcessing, exceptions
+from src.thread import ConcurrentProcessing, exceptions
# >>>>>>>>>> Dummy Functions <<<<<<<<<< #
@@ -18,7 +18,7 @@ def _dummy_raiseException(x: Exception, delay: float = 0):
def test_threadsScaleDown():
"""This test is for testing if threads scale down `max_threads` when the dataset is lesser than the thread count"""
dataset = list(range(0, 2))
- new = ParallelProcessing(
+ new = ConcurrentProcessing(
function=_dummy_dataProcessor,
dataset=dataset,
max_threads=4,
@@ -32,7 +32,7 @@ def test_threadsScaleDown():
def test_threadsProcessing():
"""This test is for testing if threads correctly order data in the `dataset` arrangement"""
dataset = list(range(0, 500))
- new = ParallelProcessing(
+ new = ConcurrentProcessing(
function=_dummy_dataProcessor, dataset=dataset, args=[0.001], daemon=True
)
new.start()
@@ -43,7 +43,7 @@ def test_threadsProcessing():
def test_raises_StillRunningError():
"""This test should raise ThreadStillRunningError"""
dataset = list(range(0, 8))
- new = ParallelProcessing(
+ new = ConcurrentProcessing(
function=_dummy_dataProcessor, dataset=dataset, args=[1], daemon=True
)
new.start()
@@ -54,7 +54,7 @@ def test_raises_StillRunningError():
def test_raises_RunTimeError():
"""This test should raise a RunTimeError"""
dataset = [RuntimeError()] * 8
- new = ParallelProcessing(
+ new = ConcurrentProcessing(
function=_dummy_raiseException, dataset=dataset, args=[0.01], daemon=True
)
with pytest.raises(RuntimeError):
diff --git a/tests/test_dataframe_compatibility.py b/tests/test_dataframe_compatibility.py
index 82a7baf..80406c3 100644
--- a/tests/test_dataframe_compatibility.py
+++ b/tests/test_dataframe_compatibility.py
@@ -1,6 +1,6 @@
import typing
import pytest
-from src.thread import ParallelProcessing
+from src.thread import ConcurrentProcessing
class DummyLengthOnly:
@@ -33,18 +33,21 @@ def __init__(self, length: typing.Any, dataset: list):
class DummyUnlikeSequence1:
- def __init__(self) -> None: ...
+ def __init__(self) -> None:
+ ...
class DummyUnlikeSequence2:
- def __init__(self) -> None: ...
+ def __init__(self) -> None:
+ ...
+
def __str__(self) -> str:
return 'invalid'
# >>>>>>>>>> Length Only <<<<<<<<<< #
def test_LO_init() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10),
_get_value=lambda *_: _,
@@ -53,7 +56,7 @@ def test_LO_init() -> None:
def test_LO_init_missingGetValueError_nothing() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10), # type: ignore
)
@@ -61,7 +64,7 @@ def test_LO_init_missingGetValueError_nothing() -> None:
def test_LO_init_missingGetValueError_lengthNum() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10), # type: ignore
_length=1,
@@ -70,7 +73,7 @@ def test_LO_init_missingGetValueError_lengthNum() -> None:
def test_LO_init_missingGetValueError_lengthFunc() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10), # type: ignore
_length=lambda _: 1,
@@ -79,7 +82,7 @@ def test_LO_init_missingGetValueError_lengthFunc() -> None:
def test_LO_init_invalidLengthValueError_negative() -> None:
with pytest.raises(ValueError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(-10),
_get_value=lambda *_: _,
@@ -88,7 +91,7 @@ def test_LO_init_invalidLengthValueError_negative() -> None:
def test_LO_init_invalidLengthValueError_zero() -> None:
with pytest.raises(ValueError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(0),
_get_value=lambda *_: _,
@@ -97,7 +100,7 @@ def test_LO_init_invalidLengthValueError_zero() -> None:
def test_LO_init_nonIntLengthError_numLike() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10.5),
_get_value=lambda *_: _,
@@ -106,7 +109,7 @@ def test_LO_init_nonIntLengthError_numLike() -> None:
def test_LO_init_nonIntLengthError() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly('10'),
_get_value=lambda *_: _,
@@ -118,7 +121,7 @@ def validate(x, i):
assert isinstance(x, DummyLengthOnly)
assert isinstance(i, int)
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10),
_get_value=validate,
@@ -128,7 +131,7 @@ def validate(x, i):
def test_LO_len() -> None:
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyLengthOnly(10),
_get_value=lambda *_: _,
@@ -138,18 +141,20 @@ def test_LO_len() -> None:
# >>>>>>>>>> Get Only <<<<<<<<<< #
def test_GO_init_int() -> None:
- ParallelProcessing(function=lambda x: x, dataset=DummyGetOnly([1, 2, 3]), _length=3)
+ ConcurrentProcessing(
+ function=lambda x: x, dataset=DummyGetOnly([1, 2, 3]), _length=3
+ )
def test_GO_init_func() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x, dataset=DummyGetOnly([1, 2, 3]), _length=lambda _: 3
)
def test_GO_init_missingLengthError() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]), # type: ignore
)
@@ -157,7 +162,7 @@ def test_GO_init_missingLengthError() -> None:
def test_GO_init_nonIntLengthError_strLike() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length='10', # type: ignore
@@ -166,7 +171,7 @@ def test_GO_init_nonIntLengthError_strLike() -> None:
def test_GO_init_nonIntLengthError_numLike() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length=10.5, # type: ignore
@@ -175,7 +180,7 @@ def test_GO_init_nonIntLengthError_numLike() -> None:
def test_GO_init_nonIntLengthError_negative() -> None:
with pytest.raises(ValueError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length=-10, # type: ignore
@@ -187,7 +192,7 @@ def validate(x, i):
assert isinstance(x, DummyGetOnly)
assert isinstance(i, int)
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length=3,
@@ -198,7 +203,7 @@ def validate(x, i):
def test_GO_len() -> None:
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length=3,
@@ -211,7 +216,7 @@ def test_GO_get() -> None:
def get(*_):
return _
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyGetOnly([1, 2, 3]),
_length=3,
@@ -222,21 +227,21 @@ def get(*_):
# >>>>>>>> Sequence Like <<<<<<<< #
def test_SO_init() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
)
def test_SO_init_list() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=[1, 2, 3],
)
def test_SO_init_tuple() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=(1, 2, 3),
)
@@ -244,28 +249,28 @@ def test_SO_init_tuple() -> None:
def test_SO_init_set() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=set([1, 2, 3]), # type: ignore
)
def test_SO_init_dict() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset={1: 1, 2: 2, 3: 3}, # type: ignore
)
def test_SO_init_str() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset='123',
)
def test_SO_init_withLength() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
_length=10,
@@ -273,7 +278,7 @@ def test_SO_init_withLength() -> None:
def test_SO_init_withGet() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
_get_value=lambda *_: _,
@@ -281,7 +286,7 @@ def test_SO_init_withGet() -> None:
def test_SO_init_withLengthAndGet() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
_length=10,
@@ -290,7 +295,7 @@ def test_SO_init_withLengthAndGet() -> None:
def test_SO_len() -> None:
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
)
@@ -302,7 +307,7 @@ def validate(x, i):
assert isinstance(x, DummySequenceLike)
assert isinstance(i, int)
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummySequenceLike(10, list(range(10))),
_get_value=validate,
@@ -314,7 +319,7 @@ def validate(x, i):
# >>>>>>>>>> Unlike Sequence <<<<<<<<<< #
def test_UO_init_clean() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(), # type: ignore
)
@@ -322,7 +327,7 @@ def test_UO_init_clean() -> None:
def test_UO_init_withOtherMethods() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence2(), # type: ignore
)
@@ -330,7 +335,7 @@ def test_UO_init_withOtherMethods() -> None:
def test_UO_init_onlyLength() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(), # type: ignore
_length=10,
@@ -339,7 +344,7 @@ def test_UO_init_onlyLength() -> None:
def test_UO_init_onlyGet() -> None:
with pytest.raises(TypeError):
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(), # type: ignore
_get_value=lambda *_: _,
@@ -347,7 +352,7 @@ def test_UO_init_onlyGet() -> None:
def test_UO_init_onlyLengthAndGet() -> None:
- ParallelProcessing(
+ ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(), # type: ignore
_length=10,
@@ -356,7 +361,7 @@ def test_UO_init_onlyLengthAndGet() -> None:
def test_UO_lengthInt() -> None:
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(),
_length=10,
@@ -366,7 +371,7 @@ def test_UO_lengthInt() -> None:
def test_UO_lengthFunc() -> None:
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(),
_length=lambda _: 10,
@@ -380,7 +385,7 @@ def validate(x, i):
assert isinstance(x, DummyUnlikeSequence1)
assert isinstance(i, int)
- process = ParallelProcessing(
+ process = ConcurrentProcessing(
function=lambda x: x,
dataset=DummyUnlikeSequence1(),
_length=10,
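
The compatibility tests above pin down the contract for non-sequence datasets: an object with neither `__len__` nor `__getitem__` is accepted only when both `_length` and `_get_value` are supplied. A short sketch of that case; `BlobSource` is a hypothetical stand-in for the `DummyUnlikeSequence` classes, and like the tests it only constructs the job:

```py
from thread import ConcurrentProcessing

class BlobSource:  # hypothetical dataset with neither __len__ nor __getitem__
    def fetch(self, index: int) -> int:
        return index * 10

# Mirrors test_UO_init_onlyLengthAndGet: both hints are required for such objects.
process = ConcurrentProcessing(
    function=lambda x: x,
    dataset=BlobSource(),  # type: ignore
    _length=10,
    _get_value=lambda source, i: source.fetch(i),
)
```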
diff --git a/tests/test_thread.py b/tests/test_thread.py
index 7873c1c..af99593 100644
--- a/tests/test_thread.py
+++ b/tests/test_thread.py
@@ -21,7 +21,8 @@ def test_threadCreation():
target=_dummy_target_raiseToPower, args=[4], kwargs={'power': 2}, daemon=True
)
new.start()
- assert new.join()
+ new.join()
+ assert not new.is_alive()
assert new.result == 16