# test_processing.py
  1. # coding: utf-8
  2. import os
  3. from synergine2.processing import ProcessManager
  4. from synergine2.utils import ChunkManager
  5. from tests import BaseTest
  6. class MyFakeClass(object):
  7. def __init__(self, value):
  8. self.value = value
  9. class TestProcessing(BaseTest):
  10. @staticmethod
  11. def _make_job_with_scalar(
  12. data_chunk: list,
  13. process_number: int,
  14. process_count: int,
  15. ) -> tuple:
  16. current_pid = os.getpid()
  17. result = sum(data_chunk)
  18. return current_pid, result
  19. @staticmethod
  20. def _make_job_with_object(
  21. data_chunk: list,
  22. process_number: int,
  23. process_count: int,
  24. ) -> tuple:
  25. current_pid = os.getpid()
  26. data = [o.value for o in data_chunk]
  27. result = sum(data)
  28. return current_pid, MyFakeClass(result)
  29. def test_parallel_jobs_with_scalar(self):
  30. chunk_manager = ChunkManager(4)
  31. process_manager = ProcessManager(
  32. process_count=4,
  33. chunk_manager=chunk_manager,
  34. )
  35. data = list(range(100))
  36. process_id_list = []
  37. final_result = 0
  38. results = process_manager.chunk_and_execute_jobs(
  39. data,
  40. job_maker=self._make_job_with_scalar,
  41. )
  42. for process_id, result in results:
  43. final_result += result
  44. process_id_list.append(process_id)
  45. # Test each process ids are differents
  46. assert sorted(process_id_list) == \
  47. sorted(list(set(process_id_list)))
  48. # Goal is 4950
  49. assert final_result == 4950
  50. def test_non_parallel_jobs_with_scalar(self):
  51. chunk_manager = ChunkManager(1)
  52. process_manager = ProcessManager(
  53. process_count=1,
  54. chunk_manager=chunk_manager,
  55. )
  56. data = list(range(100))
  57. results = process_manager.chunk_and_execute_jobs(
  58. data,
  59. job_maker=self._make_job_with_scalar,
  60. )
  61. process_id, final_result = results[0]
  62. assert len(results) == 1
  63. assert process_id == os.getpid()
  64. assert final_result == 4950
  65. def test_parallel_jobs_with_objects(self):
  66. chunk_manager = ChunkManager(4)
  67. process_manager = ProcessManager(
  68. process_count=4,
  69. chunk_manager=chunk_manager,
  70. )
  71. data = [MyFakeClass(v) for v in range(100)]
  72. process_id_list = []
  73. final_result = 0
  74. results = process_manager.chunk_and_execute_jobs(
  75. data,
  76. job_maker=self._make_job_with_object,
  77. )
  78. for process_id, result_object in results:
  79. final_result += result_object.value
  80. process_id_list.append(process_id)
  81. # Test each process ids are differents
  82. assert sorted(process_id_list) == \
  83. sorted(list(set(process_id_list)))
  84. # Goal is 4950
  85. assert final_result == 4950