test_processing.py

# coding: utf-8
import os
import multiprocessing

import psutil
import pytest

from synergine2.processing import ProcessManager
from synergine2.utils import ChunkManager

from tests import BaseTest


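# Minimal value wrapper: lets the tests verify that object (non-scalar)
# results come back intact from worker processes.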
class MyFakeClass(object):
    def __init__(self, value):
        self.value = value


class TestProcessing(BaseTest):
    def make_job_with_scalar(
        self,
        data_chunk: list,
        process_number: int,
        process_count: int,
    ) -> tuple:
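        """Return this worker's pid and the sum of its scalar chunk."""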
        current_pid = os.getpid()
        result = sum(data_chunk)
        return current_pid, result

    def make_job_with_object(
        self,
        data_chunk: list,
        process_number: int,
        process_count: int,
    ) -> tuple:
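        """Return this worker's pid and the chunk sum wrapped in MyFakeClass."""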
        current_pid = os.getpid()
        data = [o.value for o in data_chunk]
        result = sum(data)
        return current_pid, MyFakeClass(result)
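
    # The parallel tests only make sense when this process can be scheduled
    # on at least two CPUs, hence the cpu_affinity guard below.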
    @pytest.mark.skipif(len(psutil.Process().cpu_affinity()) < 2, reason="requires 2 or more cpus")
    def test_parallel_jobs_with_scalar(self):
        chunk_manager = ChunkManager(2)
        process_manager = ProcessManager(
            process_count=2,
            chunk_manager=chunk_manager,
        )
        data = list(range(100))
        process_id_list = []
        final_result = 0
        results = process_manager.chunk_and_execute_jobs(
            data,
            job_maker=self.make_job_with_scalar,
        )
        for process_id, result in results:
            final_result += result
            process_id_list.append(process_id)
        # Each chunk must have been handled by a distinct process (unique pids)
        assert sorted(process_id_list) == sorted(set(process_id_list))
        # sum(range(100)) == 4950
        assert final_result == 4950

    def test_non_parallel_jobs_with_scalar(self):
        chunk_manager = ChunkManager(1)
        process_manager = ProcessManager(
            process_count=1,
            chunk_manager=chunk_manager,
        )
        data = list(range(100))
        results = process_manager.chunk_and_execute_jobs(
            data,
            job_maker=self.make_job_with_scalar,
        )
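        # With process_count=1 the job is expected to run in the calling
        # process itself, so the returned pid matches os.getpid().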
        process_id, final_result = results[0]
        assert len(results) == 1
        assert process_id == os.getpid()
        assert final_result == 4950

    @pytest.mark.skipif(len(psutil.Process().cpu_affinity()) < 2, reason="requires 2 or more cpus")
    def test_parallel_jobs_with_objects(self):
        chunk_manager = ChunkManager(4)
        process_manager = ProcessManager(
            process_count=4,
            chunk_manager=chunk_manager,
        )
        data = [MyFakeClass(v) for v in range(100)]
        process_id_list = []
        final_result = 0
        results = process_manager.chunk_and_execute_jobs(
            data,
            job_maker=self.make_job_with_object,
        )
        for process_id, result_object in results:
            final_result += result_object.value
            process_id_list.append(process_id)
        # Each chunk must have been handled by a distinct process (unique pids)
        assert sorted(process_id_list) == sorted(set(process_id_list))
        # sum(range(100)) == 4950
        assert final_result == 4950