DEV: shared data with cycle processing

Bastien Sevajol, 7 years ago
commit 6b69ce2217

synergine2/cycle.py  (+161 -99)

 # coding: utf-8
 import multiprocessing
+import typing

 from synergine2.base import BaseObject
 from synergine2.config import Config
+from synergine2.exceptions import SynergineException
 from synergine2.log import SynergineLogger
 from synergine2.processing import ProcessManager
-from synergine2.simulation import SimulationMechanism
-from synergine2.simulation import SimulationBehaviour
+from synergine2.share import shared
+from synergine2.simulation import Subject
 from synergine2.simulation import Simulation
 from synergine2.simulation import SubjectBehaviour
 from synergine2.simulation import SubjectMechanism
 from synergine2.simulation import Event
-from synergine2.utils import ChunkManager, time_it
+from synergine2.utils import time_it
+
+
+JOB_TYPE_SUBJECTS = 0
+JOB_TYPE_SIMULATION = 1


 class CycleManager(BaseObject):
@@ ... @@
             simulation: Simulation,
             process_manager: ProcessManager=None,
     ):
+        # TODO: Reproduce the behaviour/etc index mechanism for the simulation
+        self.config = config
+        self.logger = logger
+        self.simulation = simulation
+        self.current_cycle = -1
+        self.first_cycle = True
+
+        self.subject_mechanisms_cache = {}  # type: typing.Dict[int, typing.Dict[str, SubjectMechanism]]
+        self.subject_behaviours_cache = {}  # type: typing.Dict[int, typing.Dict[str, SubjectBehaviour]]
+
+        # TODO NOW: Processes will have to keep a list of the subjects that are new / that they do not know yet.
+        # Be careful that they are not "exposed" when these subjects are created inside the process.
+        # These subjects are meant to adopt the id of the real subject right after their instantiation.
         if process_manager is None:
             process_manager = ProcessManager(
                 config=config,
                 # TODO: Replace this lousy config key (core.use_x_cores)
                 process_count=config.get('core', {}).get('use_x_cores', multiprocessing.cpu_count()),
-                chunk_manager=ChunkManager(multiprocessing.cpu_count()),
+                job=self.job,
             )
-
-        self.config = config
-        self.logger = logger
-        self.simulation = simulation
         self.process_manager = process_manager
-        self.current_cycle = -1
-        self.first_cycle = True
+
+    def job(self, worker_id: int, process_count: int, job_type: str) -> 'TODO':
+        # HERE (in the process) we must have:
+        # the x:y slice of subjects to process
+        shared.refresh()
+        if job_type == JOB_TYPE_SUBJECTS:
+            return self._job_subjects(worker_id, process_count)
+        if job_type == JOB_TYPE_SIMULATION:
+            return self._job_simulation(worker_id, process_count)
+        raise SynergineException('Unknown job type "{}"'.format(job_type))
+
+    def _job_subjects(self, worker_id: int, process_count: int) -> typing.Dict[int, typing.Dict[str, typing.Any]]:
+        # Determine the list of subjects this process must work with
+        subject_ids = shared.get('subject_ids')
+        chunk_length, rest = divmod(len(subject_ids), process_count)
+
+        from_ = chunk_length * worker_id
+        to_ = from_ + chunk_length
+
+        if worker_id + 1 == process_count:
+            to_ += rest
+
+        subject_ids_to_parse = subject_ids[from_:to_]
+
+        # Build the list of subjects to compute
+        subjects = []
+        for subject_id in subject_ids_to_parse:
+            subject = self.simulation.get_or_create_subject(subject_id)
+            subjects.append(subject)
+
+        results_by_subjects = self._subjects_computing(subjects)
+        return results_by_subjects
+
+    def _job_simulation(self, worker_id: int, process_count: int) -> typing.Dict[int, typing.Dict[str, typing.Any]]:
+        self.logger.info('Simulation computing (worker {})'.format(worker_id))
+
+        mechanisms = self.simulation.mechanisms.values()
+        mechanisms_data = {}
+        behaviours_data = {}
+
+        self.logger.info('{} mechanisms to compute'.format(str(len(mechanisms))))
+        if self.logger.is_debug:
+            self.logger.debug('Mechanisms are: {}'.format(
+                str([m.repr_debug() for m in mechanisms])
+            ))
+
+        for mechanism in mechanisms:
+            mechanism_data = mechanism.run(
+                process_number=worker_id,
+                process_count=process_count,
+            )
+
+            if self.logger.is_debug:
+                self.logger.debug('{} mechanism product data: {}'.format(
+                    type(mechanism).__name__,
+                    str(mechanism_data),
+                ))
+
+            mechanisms_data[type(mechanism)] = mechanism_data
+
+        behaviours = self.simulation.behaviours.values()
+        self.logger.info('{} behaviours to compute'.format(str(len(behaviours))))
+
+        if self.logger.is_debug:
+            self.logger.debug('Behaviours are: {}'.format(
+                str([b.repr_debug() for b in behaviours])
+            ))
+
+        for behaviour in behaviours:
+            behaviour_data = behaviour.run(mechanisms_data)  # TODO: Behaviours dependencies
+            if self.logger.is_debug:
+                self.logger.debug('{} behaviour produce data: {}'.format(
+                    type(behaviour).__name__,
+                    behaviour_data,
+                ))
+
+            if behaviour_data:
+                behaviours_data[type(behaviour)] = behaviour_data
+
+        return behaviours_data

     def next(self) -> [Event]:
         if self.first_cycle:
@@ ... @@
         self.logger.info('Process cycle {}'.format(self.current_cycle))

         events = []
+        shared.commit()
+
         # TODO: handle non-parallelizable behaviours
         # TODO: allow execution orders to be specified
         with time_it() as elapsed_time:
@@ ... @@

         self.logger.info('Process simulation events')

-        results_by_processes = self.process_manager.execute_jobs(
-            data=self.simulation,
-            job_maker=self.simulation_computing,
-        )
+        # TODO: Think about computing the simulation cycle in workers
+        results_by_processes = self.process_manager.make_them_work(JOB_TYPE_SIMULATION)

         for process_result in results_by_processes:
             for behaviour_class, behaviour_result in process_result.items():
@@ ... @@
         results = {}

         self.logger.info('Process subjects events')
-
-        results_by_processes = self.process_manager.chunk_and_execute_jobs(
-            data=self.simulation.subjects,
-            job_maker=self.subjects_computing,
-        )
+        results_by_processes = self.process_manager.make_them_work(JOB_TYPE_SUBJECTS)

         for process_results in results_by_processes:
             results.update(process_results)

         # Duplicate list to prevent conflicts with behaviours subjects manipulations
         for subject in self.simulation.subjects[:]:
-            subject_behaviours = results.get(subject.id, {})
+            subject_behaviours_results = results.get(subject.id, {})
             if subject.behaviour_selector:
                 # TODO: Logging
-                subject_behaviours = subject.behaviour_selector.reduce_behaviours(dict(subject_behaviours))
+                subject_behaviours_results = subject.behaviour_selector.reduce_behaviours(dict(
+                    subject_behaviours_results,
+                ))

-            for behaviour_class, behaviour_data in subject_behaviours.items():
+            subject_behaviours = self.get_subject_behaviours(subject)
+            for behaviour_class_name, behaviour_data in subject_behaviours_results.items():
                 # TODO: Add a step selecting which actions to perform (e.g. neural-like)
                 # (e.g. hiding and fleeing are not compatible)
-                behaviour_events = subject.behaviours[behaviour_class].action(behaviour_data)
+                behaviour_events = subject_behaviours[behaviour_class_name].action(behaviour_data)

                 self.logger.info('{} behaviour for subject {} generate {} events'.format(
-                    str(behaviour_class),
+                    str(behaviour_class_name),
                     str(subject.id),
                     str(len(behaviour_events)),
                 ))

                 if self.logger.is_debug:
                     self.logger.debug('{} behaviour for subject {} generated events: {}'.format(
-                        str(behaviour_class),
+                        str(behaviour_class_name),
                         str(subject.id),
                         str([e.repr_debug() for e in behaviour_events]),
                     ))
@@ ... @@
         self.logger.info('Subjects behaviours generate {} events'.format(len(events)))
         return events

-    def simulation_computing(
-            self,
-            simulation,
-            process_number,
-            process_count,
-    ):
-        self.logger.info('Simulation computing')
-
-        # TODO: is it necessary to pass simulation?
-        mechanisms = self.get_mechanisms_to_compute(simulation)
-        mechanisms_data = {}
-        behaviours_data = {}
-
-        self.logger.info('{} mechanisms to compute'.format(str(len(mechanisms))))
-        if self.logger.is_debug:
-            self.logger.debug('Mechanisms are: {}'.format(
-                str([m.repr_debug() for m in mechanisms])
-            ))
-
-        for mechanism in mechanisms:
-            mechanism_data = mechanism.run(
-                process_number=process_number,
-                process_count=process_count,
-            )
-
-            if self.logger.is_debug:
-                self.logger.debug('{} mechanism product data: {}'.format(
-                    type(mechanism).__name__,
-                    str(mechanism_data),
-                ))
-
-            mechanisms_data[type(mechanism)] = mechanism_data
-
-        behaviours = self.get_behaviours_to_compute(simulation)
-        self.logger.info('{} behaviours to compute'.format(str(len(behaviours))))
-
-        if self.logger.is_debug:
-            self.logger.debug('Behaviours are: {}'.format(
-                str([b.repr_debug() for b in behaviours])
-            ))
-
-        for behaviour in behaviours:
-            behaviour_data = behaviour.run(mechanisms_data)  # TODO: Behaviours dependencies
-            if self.logger.is_debug:
-                self.logger.debug('{} behaviour produce data: {}'.format(
-                    type(behaviour).__name__,
-                    behaviour_data,
-                ))
-
-            if behaviour_data:
-                behaviours_data[type(behaviour)] = behaviour_data
-
-        return behaviours_data
-
-    def subjects_computing(
+    def _subjects_computing(
             self,
             subjects,
             process_number=None,
             process_count=None,
-    ):
+    ) -> typing.Dict[int, typing.Dict[str, typing.Any]]:
         results = {}
         self.logger.info('Subjects computing: {} subjects to compute'.format(str(len(subjects))))

         for subject in subjects:
-            mechanisms = self.get_mechanisms_to_compute(subject)
+            mechanisms = self.get_subject_mechanisms(subject)

             if mechanisms:
                 self.logger.info('Subject {}: {} mechanisms'.format(
@@ ... @@
                 if self.logger.is_debug:
                     self.logger.info('Subject {}: mechanisms are: {}'.format(
                         str(subject.id),
-                        str([m.repr_debug for m in mechanisms])
+                        str([m.repr_debug for n, m in mechanisms.items()])
                     ))

             mechanisms_data = {}
             behaviours_data = {}

-            for mechanism in mechanisms:
+            for mechanism_class_name, mechanism in mechanisms.items():
                 with time_it() as elapsed_time:
                     mechanism_data = mechanism.run()
                 if self.logger.is_debug:
@@ ... @@
                         elapsed_time.get_final_time(),
                     ))

-                mechanisms_data[type(mechanism)] = mechanism_data
+                mechanisms_data[mechanism_class_name] = mechanism_data

             if mechanisms:
                 if self.logger.is_debug:
@@ ... @@
                         str(mechanisms_data),
                     ))

-            subject_behaviours = self.get_behaviours_to_compute(subject)
+            subject_behaviours = self.get_subject_behaviours(subject)
             if not subject_behaviours:
                 break

@@ ... @@
                 str(len(subject_behaviours)),
             ))

-            for behaviour in subject_behaviours:
+            for behaviour_class_name, behaviour in subject_behaviours.items():
                 self.logger.info('Subject {}: run {} behaviour'.format(
                     str(subject.id),
                     str(type(behaviour)),
@@ ... @@
                     ))

                 if behaviour_data:
-                    behaviours_data[type(behaviour)] = behaviour_data
+                    behaviours_data[behaviour_class_name] = behaviour_data

             results[subject.id] = behaviours_data
         return results

-    def get_mechanisms_to_compute(self, mechanisable) -> [SubjectMechanism, SimulationMechanism]:
+    def get_subject_mechanisms(self, subject: Subject) -> typing.Dict[str, SubjectMechanism]:
         # TODO: Implement a system that inhibits mechanisms (e.g. sleep inhibits hearing)
-        return mechanisable.mechanisms.values()
+        # Warning: this is also used in the main process, is it relevant to do it here?
+        try:
+            return self.subject_mechanisms_cache[subject.id]
+        except KeyError:
+            mechanisms = {}
+            for mechanism_class_id in shared.get('subject_mechanisms_index')[subject.id]:
+                mechanism_class = self.simulation.index[mechanism_class_id]
+                mechanism = mechanism_class(
+                    self.config,
+                    self.simulation,
+                    subject,
+                )
+                mechanisms[mechanism_class.__name__] = mechanism
+            self.subject_mechanisms_cache[subject.id] = mechanisms
+            return mechanisms

-    def get_behaviours_to_compute(self, mechanisable) -> [SubjectBehaviour, SimulationBehaviour]:
+    def get_subject_behaviours(self, subject: Subject) -> typing.Dict[str, SubjectBehaviour]:
         # TODO: Implement a system that inhibits behaviours (e.g. sleep inhibits hunger)
-        behaviours = list(mechanisable.behaviours.values())
-
-        for behaviour in behaviours[:]:
-            if behaviour.frequency != 1:
-                if self.current_cycle % behaviour.frequency:
-                    behaviours.remove(behaviour)
-
-        return behaviours
+        # Warning: this is also used in the main process, is it relevant to do it here?
+        try:
+            return self.subject_behaviours_cache[subject.id]
+        except KeyError:
+            behaviours = {}
+            for behaviour_class_id in shared.get('subject_behaviours_index')[subject.id]:
+                behaviour_class = self.simulation.index[behaviour_class_id]
+                behaviour = behaviour_class(
+                    self.config,
+                    self.simulation,
+                    subject,
+                )
+                behaviours[behaviour_class.__name__] = behaviour
+            self.subject_behaviours_cache[subject.id] = behaviours
+            return behaviours

     def apply_actions(
             self,
@@ ... @@

         self.logger.info('{} events generated'.format(len(events)))
         return events
+
+    def stop(self) -> None:
+        self.process_manager.terminate()
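Note on the chunking above: _job_subjects gives each worker one contiguous slice of the shared subject_ids list and lets the last worker absorb the division remainder. A minimal standalone sketch of that arithmetic, for illustration only (not part of the commit):

# Sketch of the slicing done by CycleManager._job_subjects: one contiguous
# chunk per worker, the last worker also takes the remainder.
def chunk_ids(subject_ids, worker_id, process_count):
    chunk_length, rest = divmod(len(subject_ids), process_count)
    from_ = chunk_length * worker_id
    to_ = from_ + chunk_length
    if worker_id + 1 == process_count:
        to_ += rest
    return subject_ids[from_:to_]

# 10 subject ids over 3 workers -> slices of 3, 3 and 4 ids
assert [len(chunk_ids(list(range(10)), w, 3)) for w in range(3)] == [3, 3, 4]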

synergine2/processing.py  (+5 -33)


     def work(self, *args, **kwargs):
         while True:
-            message = self.process_read_pipe.recv()
-            if message == STOP:
+            args = self.process_read_pipe.recv()
+            if args == STOP:
                 return

-            result = self.real_job(message)
+            result = self.real_job(*args)
             self.local_write_pipe.send(result)


@@ ... @@
     def make_them_work(self, message: typing.Any) -> 'TODO':
         responses = []

-        for worker in self.workers:
-            worker.process_write_pipe.send(message)
+        for worker_id, worker in enumerate(self.workers):
+            worker.process_write_pipe.send((worker_id, self._process_count, message))

         for worker in self.workers:
             responses.append(worker.local_read_pipe.recv())
@@ ... @@

         for worker in self.workers:
             worker.process.join()
-
-    #
-    # def chunk_and_execute_jobs(self, data: list, job_maker: types.FunctionType) -> list:
-    #     chunks = self._chunk_manager.make_chunks(data)
-    #
-    #     if self._process_count > 1:
-    #         print('USE POOL')
-    #         results = self.pool.starmap(job_maker, [(chunk, i, self._process_count) for i, chunk in enumerate(chunks)])
-    #     else:
-    #         print('USE MONO')
-    #         results = [job_maker(data, 0, 1)]
-    #
-    #     return results
-    #
-    # def execute_jobs(self, data: object, job_maker: types.FunctionType) -> list:
-    #     # TODO: Is there a reason to make multiprocessing here ? data is not chunked ...
-    #     if self._process_count > 1:
-    #         results = self.pool.starmap(job_maker, [(data, i, self._process_count) for i in range(self._process_count)])
-    #     else:
-    #         results = [job_maker(data, 0, 1)]
-    #
-    #     return results
-    #
-    # def __del__(self):
-    #     # TODO: DEV
-    #     return
-    #     if self.pool:
-    #         self.pool.terminate()
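For context, the reworked worker loop above now receives a (worker_id, process_count, message) tuple on its pipe and unpacks it into real_job(*args). A reduced, self-contained sketch of that pipe protocol, with illustrative names and independent of the ProcessManager/Worker classes:

# Reduced sketch of the (worker_id, process_count, message) pipe protocol
# used by ProcessManager.make_them_work(); illustration only.
import multiprocessing

STOP = 'STOP'

def worker_loop(pipe, real_job):
    while True:
        args = pipe.recv()
        if args == STOP:
            return
        pipe.send(real_job(*args))

def real_job(worker_id, process_count, message):
    return (worker_id, message)

if __name__ == '__main__':
    pipes = []
    for _ in range(2):
        parent, child = multiprocessing.Pipe()
        multiprocessing.Process(target=worker_loop, args=(child, real_job)).start()
        pipes.append(parent)

    # Each worker gets its own id plus the total worker count with the message
    for worker_id, pipe in enumerate(pipes):
        pipe.send((worker_id, len(pipes), 'subjects'))

    print([pipe.recv() for pipe in pipes])  # [(0, 'subjects'), (1, 'subjects')]

    for pipe in pipes:
        pipe.send(STOP)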

synergine2/share.py  (+87 -4)

         raise NotImplementedError()


+class TrackedDict(dict):
+    base = dict
+
+    def __init__(self, seq=None, **kwargs):
+        self.key = kwargs.pop('key')
+        self.shared = kwargs.pop('shared')
+        super().__init__(seq, **kwargs)
+
+    def __setitem__(self, key, value):
+        super().__setitem__(key, value)
+        self.shared.set(self.key, dict(self))
+
+    def setdefault(self, k, d=None):
+        v = super().setdefault(k, d)
+        self.shared.set(self.key, dict(self))
+        return v
+    # TODO: Cover all methods
+
+
+class TrackedList(list):
+    base = list
+
+    def __init__(self, seq=(), **kwargs):
+        self.key = kwargs.pop('key')
+        self.shared = kwargs.pop('shared')
+        super().__init__(seq)
+
+    def append(self, p_object):
+        super().append(p_object)
+        self.shared.set(self.key, list(self))
+
+    # TODO: Cover all methods
+
+
 class SharedDataManager(object):
     """
     This object is designed to own shared memory between processes. It must be fed (with the set method) before
@@ ... @@

         self._data = {}
         self._modified_keys = set()
+        self._default_values = {}
+        self._special_types = {}  # type: typing.Dict[str, typing.Union[typing.Type[TrackedDict], typing.Type[TrackedList]]]  # nopep8

         if clear:
-            self._r.flushdb()
+            self.clear()
+
+    def clear(self) -> None:
+        self._r.flushdb()
+        self._data = {}
+        self._modified_keys = set()
+
+    def reset(self) -> None:
+        for key, value in self._default_values.items():
+            self.set(key, value)
+        self.commit()
+        self._data = {}

     def set(self, key: str, value: typing.Any) -> None:
+        try:
+            special_type = self._special_types[key]
+            value = special_type(value, key=key, shared=self)
+        except KeyError:
+            pass
+
         self._data[key] = value
         self._modified_keys.add(key)

     def get(self, *key_args: typing.Union[str, float, int]) -> typing.Any:
         key = '_'.join([str(v) for v in key_args])

-        if key not in self._data:
+        try:
+            return self._data[key]
+        except KeyError:
             b_value = self._r.get(key)
             if b_value is None:
                 # We do not allow None value storage
                 raise UnknownSharedData('No shared data for key "{}"'.format(key))
-            self._data[key] = pickle.loads(b_value)
+
+            value = pickle.loads(b_value)
+            special_type = None
+
+            try:
+                special_type = self._special_types[key]
+            except KeyError:
+                pass
+
+            if special_type:
+                self._data[key] = special_type(value, key=key, shared=self)
+            else:
+                self._data[key] = value

         return self._data[key]

     def commit(self) -> None:
         for key in self._modified_keys:
-            self._r.set(key, pickle.dumps(self.get(key)))
+            try:
+                special_type = self._special_types[key]
+                value = special_type.base(self.get(key))
+                self._r.set(key, pickle.dumps(value))
+            except KeyError:
+                self._r.set(key, pickle.dumps(self.get(key)))
         self._modified_keys = set()

     def refresh(self) -> None:
@@ ... @@
             key = '_'.join(key_args)
         indexes = indexes or []

+        if type(value) is dict:
+            value = TrackedDict(value, key=key, shared=shared)
+            self._special_types[key] = TrackedDict
+        elif type(value) is list:
+            value = TrackedList(value, key=key, shared=shared)
+            self._special_types[key] = TrackedList
+
         def get_key(obj):
             return key

@@ ... @@
             key_formatter = get_key_with_id
         else:
             self.set(key, value)
+            self._default_values[key] = value
             key_formatter = get_key

         def fget(self_):
@@ ... @@

         return shared_property

+# TODO: Is there a way to allow the SharedDataManager class to be overridden?
+shared = SharedDataManager()
+

 class ListIndex(SharedDataIndex):
     def add(self, value):
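The TrackedDict/TrackedList wrappers above make in-place mutations visible to commit() by writing a plain copy of the container back through SharedDataManager.set() on every covered method. A stripped-down illustration of that write-back idea, with a hypothetical in-memory store standing in for the Redis-backed manager:

# Stripped-down illustration of the TrackedList write-back idea; InMemoryStore
# is a hypothetical stand-in for SharedDataManager, not part of the commit.
class InMemoryStore:
    def __init__(self):
        self.data = {}
        self.modified_keys = set()

    def set(self, key, value):
        self.data[key] = value
        self.modified_keys.add(key)


class TrackedList(list):
    def __init__(self, seq=(), *, key, shared):
        self.key = key
        self.shared = shared
        super().__init__(seq)

    def append(self, item):
        super().append(item)
        self.shared.set(self.key, list(self))  # write back on each mutation


store = InMemoryStore()
subject_ids = TrackedList([1, 2], key='subject_ids', shared=store)
subject_ids.append(3)
assert store.data['subject_ids'] == [1, 2, 3]
assert 'subject_ids' in store.modified_keys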

synergine2/simulation.py  (+107 -41)


 from synergine2.base import BaseObject
 from synergine2.config import Config
+from synergine2.share import shared
 from synergine2.utils import get_mechanisms_classes


@@ ... @@
     collections = []
     behaviours_classes = []
     behaviour_selector_class = None  # type: typing.Type[SubjectBehaviourSelector]
+    intention_manager_class = None  # type: typing.Type[IntentionManager]

     def __init__(
         self,
@@ ... @@
         self.collections = self.collections[:]

         self.config = config
-        self.id = id(self)  # We store the object id because it's lost between processes
+        self._id = id(self)  # We store the object id because it's lost between processes
         self.simulation = simulation
-        self.behaviours = {}
-        self.mechanisms = {}
-        self.intentions = IntentionManager()
-        self.behaviour_selector = None  # type: SubjectBehaviourSelector
+        self.intentions = None
+
         if self.behaviour_selector_class:
             self.behaviour_selector = self.behaviour_selector_class()
+        else:
+            self.behaviour_selector = SubjectBehaviourSelector()
+
+        if self.intention_manager_class:
+            self.intentions = self.intention_manager_class()
+        else:
+            self.intentions = IntentionManager()
+
+        # TODO: Rethink the collection mechanism: usefulness, usage with processes, etc.
+        # for collection in self.collections:
+        #     self.simulation.collections[collection].append(self)
+
+    @property
+    def id(self) -> int:
+        return self._id

-        for collection in self.collections:
-            self.simulation.collections[collection].append(self)
+    def change_id(self, id_: int) -> None:
+        self._id = id_

-        self.initialize()
+    def expose(self) -> None:
+        subject_behaviours_index = shared.get('subject_behaviours_index').setdefault(self._id, [])
+        subject_mechanisms_index = shared.get('subject_mechanisms_index').setdefault(self._id, [])
+        subject_classes = shared.get('subject_classes')
+
+        for behaviour_class in self.behaviours_classes:
+            subject_behaviours_index.append(id(behaviour_class))
+            for mechanism_class in behaviour_class.use:
+                subject_mechanisms_index.append(id(mechanism_class))
+
+        subject_classes[self._id] = id(type(self))

     def __str__(self):
         return self.__repr__()
@@ ... @@
             self.id,
         )

-    def initialize(self):
-        for mechanism_class in get_mechanisms_classes(self):
-            self.mechanisms[mechanism_class] = mechanism_class(
-                config=self.config,
-                simulation=self.simulation,
-                subject=self,
-            )
-
-        for behaviour_class in self.behaviours_classes:
-            self.behaviours[behaviour_class] = behaviour_class(
-                config=self.config,
-                simulation=self.simulation,
-                subject=self,
-            )
-

 class Subjects(list):
     """
     TODO: Manage other list methods
     """
+    subject_ids = shared.create('subject_ids', [])
+
     def __init__(self, *args, **kwargs):
         self.simulation = kwargs.pop('simulation')
         self.removes = []
         self.adds = []
         self.track_changes = False
         self.index = {}
+        self._auto_expose = True
         super().__init__(*args, **kwargs)

+    @property
+    def auto_expose(self) -> bool:
+        return self._auto_expose
+
+    @auto_expose.setter
+    def auto_expose(self, value: bool) -> None:
+        assert self._auto_expose
+        self._auto_expose = value
+
     def remove(self, value: Subject):
         # Remove from index
         del self.index[value.id]
+        self.subject_ids.remove(value.id)
         # Remove from subjects list
         super().remove(value)
         # Remove from collections
@@ ... @@
         # Add to removed listing
         if self.track_changes:
             self.removes.append(value)
+        # TODO: Also remove things from shared! Otherwise memory leaks in the database

     def append(self, p_object):
         # Add to index
         self.index[p_object.id] = p_object
+        self.subject_ids.append(p_object.id)
         # Add to subjects list
         super().append(p_object)
         # Add to adds list
         if self.track_changes:
             self.adds.append(p_object)
+        if self.auto_expose:
+            p_object.expose()


 class Simulation(BaseObject):
     accepted_subject_class = Subjects
     behaviours_classes = []

+    subject_behaviours_index = shared.create('subject_behaviours_index', {})
+    subject_mechanisms_index = shared.create('subject_mechanisms_index', {})
+    subject_classes = shared.create('subject_classes', {})
+
     def __init__(
         self,
         config: Config,
     ):
         self.config = config
         self.collections = collections.defaultdict(list)
-        self._subjects = None
+        self._subjects = None  # type: Subjects
+
+        # Should contain all usable classes of Behaviours, Mechanisms, SubjectBehaviourSelectors,
+        # IntentionManagers, Subject
+        self._index = {}  # type: typing.Dict[int, type]
+        self._index_locked = False
+
         self.behaviours = {}
         self.mechanisms = {}

-        self.initialize()
+        for mechanism_class in get_mechanisms_classes(self):
+            self.mechanisms[mechanism_class.__name__] = mechanism_class(
+                config=self.config,
+                simulation=self,
+            )
+
+        for behaviour_class in self.behaviours_classes:
+            self.behaviours[behaviour_class.__name__] = behaviour_class(
+                config=self.config,
+                simulation=self,
+            )
+
+    def add_to_index(self, class_: type) -> None:
+        assert not self._index_locked
+        self._index[id(class_)] = class_
+
+    @property
+    def index(self) -> typing.Dict[int, type]:
+        return self._index
+
+    def lock_index(self) -> None:
+        self._index_locked = True

     @property
     def subjects(self):
@@ ... @@
             ))
         self._subjects = value

-    def initialize(self):
-        for mechanism_class in get_mechanisms_classes(self):
-            self.mechanisms[mechanism_class] = mechanism_class(
-                config=self.config,
-                simulation=self,
-            )
+    def get_or_create_subject(self, subject_id: int) -> Subject:
+        try:
+            return self._subjects.index[subject_id]
+        except KeyError:
+            # We should be in a process context and the subject has to be created
+            subject_class_id = shared.get('subject_classes')[subject_id]
+            subject_class = self.index[subject_class_id]
+            subject = subject_class(self.config, self)
+            subject.change_id(subject_id)
+            self.subjects.append(subject)
+            return subject

-        for behaviour_class in self.behaviours_classes:
-            self.behaviours[behaviour_class] = behaviour_class(
-                config=self.config,
-                simulation=self,
-            )
-

+class Mechanism(BaseObject):
+    pass

-class SubjectMechanism(BaseObject):
+
+class SubjectMechanism(Mechanism):
     def __init__(
             self,
             config: Config,
@@ ... @@
         raise NotImplementedError()


-class SimulationMechanism(BaseObject):
+class SimulationMechanism(Mechanism):
     """If parallelizable behaviour, call """
     parallelizable = False

@@ ... @@
         return self.__class__.__name__


-class SubjectBehaviour(BaseObject):
+class Behaviour(BaseObject):
+    def run(self, data):
+        raise NotImplementedError()
+
+
+class SubjectBehaviour(Behaviour):
     frequency = 1
     use = []  # type: typing.List[typing.Type[SubjectMechanism]]

@@ ... @@
         raise NotImplementedError()


-class SimulationBehaviour(BaseObject):
+class SimulationBehaviour(Behaviour):
     frequency = 1
     use = []

@@ ... @@
         self,
         behaviours: typing.Dict[typing.Type[SubjectBehaviour], object],
     ) -> typing.Dict[typing.Type[SubjectBehaviour], object]:
-        raise NotImplementedError()
+        return behaviours
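The expose() / get_or_create_subject() pair above works through the simulation's class index: classes are registered under id(class_), subjects publish those ids in shared data, and a worker process resolves the ids back to classes to rebuild the subject locally. A compressed sketch of the resolution step, with hypothetical names and the shared storage and processes left out:

# Compressed sketch of the id()-based class index used by Simulation.add_to_index()
# and Subject.expose(); MoveBehaviour and the literal subject id are hypothetical.
class MoveBehaviour:
    pass

index = {id(MoveBehaviour): MoveBehaviour}              # Simulation._index equivalent
subject_behaviours_index = {1234: [id(MoveBehaviour)]}  # published by Subject.expose()

# A worker rebuilds the subject's behaviours from the shared index:
rebuilt = {
    index[class_id].__name__: index[class_id]()
    for class_id in subject_behaviours_index[1234]
}
assert list(rebuilt) == ['MoveBehaviour']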

synergine2/terminals.py  (+3 -0)



 class TerminalPackage(BaseObject):
+    """
+    TODO: Update this class considering shared data across processes
+    """
     def __init__(
             self,
             subjects: [Subject]=None,

synergine2_xyz/physics.py  (+1 -1)

         self.move_cost_computer = self.move_cost_computer_class(config)

     def load(self) -> None:
-        raise NotImplementedError()
+        pass

     def position_to_key(self, position: typing.Tuple[int, int]) -> str:
         return '{}.{}'.format(*position)

synergine2_xyz/simulation.py  (+1 -1)

         self.physics.load()

     def create_physics(self) -> Physics:
-        raise NotImplementedError()
+        return Physics(self.config)

     def is_possible_subject_position(self, subject: XYZSubject, position: tuple) -> bool:
         return self.is_possible_position(position)

tests/test_cycle.py  (+113 -0)

+# coding: utf-8
+from synergine2.config import Config
+from synergine2.cycle import CycleManager
+from synergine2.log import SynergineLogger
+from synergine2.share import shared
+from synergine2.simulation import Simulation
+from synergine2.simulation import Event
+from synergine2.simulation import Subject
+from synergine2.simulation import Subjects
+from synergine2.simulation import SubjectMechanism
+from synergine2.simulation import SubjectBehaviour
+from tests import BaseTest
+
+
+class MyEvent(Event):
+    def __init__(self, value):
+        self.value = value
+
+
+class MySubjectMechanism(SubjectMechanism):
+    def run(self):
+        return 42
+
+
+class MySubjectBehavior(SubjectBehaviour):
+    use = [MySubjectMechanism]
+
+    def run(self, data):
+        class_name = MySubjectMechanism.__name__
+        if class_name in data and data[class_name] == 42:
+            return self.subject.id
+
+    def action(self, data) -> [Event]:
+        return [MyEvent(data * 2)]
+
+
+class MySubject(Subject):
+    behaviours_classes = [MySubjectBehavior]
+
+
+class MySubjects(Subjects):
+    pass
+
+
+class TestCycle(BaseTest):
+    # def test_subjects_cycle(self):
+    #     shared.reset()
+    #     config = Config({'core': {'use_x_cores': 2}})
+    #     logger = SynergineLogger(name='test')
+    #
+    #     simulation = Simulation(config)
+    #     subjects = MySubjects(simulation=simulation)
+    #     simulation.subjects = subjects
+    #
+    #     # Prepare simulation class index
+    #     simulation.add_to_index(MySubjectBehavior)
+    #     simulation.add_to_index(MySubjectMechanism)
+    #     simulation.add_to_index(MySubject)
+    #
+    #     for i in range(3):
+    #         subjects.append(MySubject(config, simulation=simulation))
+    #
+    #     cycle_manager = CycleManager(
+    #         config=config,
+    #         logger=logger,
+    #         simulation=simulation,
+    #     )
+    #
+    #     events = cycle_manager.next()
+    #     cycle_manager.stop()
+    #
+    #     assert 3 == len(events)
+    #     event_values = [e.value for e in events]
+    #     assert all([s.id * 2 in event_values for s in subjects])
+
+    def test_new_subject(self):
+        shared.reset()
+        subject_ids = shared.get('subject_ids')
+        config = Config({'core': {'use_x_cores': 1}})
+        logger = SynergineLogger(name='test')
+
+        simulation = Simulation(config)
+        subjects = MySubjects(simulation=simulation)
+        simulation.subjects = subjects
+
+        # Prepare simulation class index
+        simulation.add_to_index(MySubjectBehavior)
+        simulation.add_to_index(MySubjectMechanism)
+        simulation.add_to_index(MySubject)
+
+        for i in range(3):
+            subjects.append(MySubject(config, simulation=simulation))
+
+        cycle_manager = CycleManager(
+            config=config,
+            logger=logger,
+            simulation=simulation,
+        )
+
+        events = cycle_manager.next()
+
+        assert 3 == len(events)
+        event_values = [e.value for e in events]
+        assert all([s.id * 2 in event_values for s in subjects])
+
+        subjects.append(MySubject(config, simulation=simulation))
+        events = cycle_manager.next()
+        cycle_manager.stop()
+
+        assert 4 == len(events)
+        event_values = [e.value for e in events]
+        assert all([s.id * 2 in event_values for s in subjects])
+

tests/test_life_game.py  (+0 -192)

-# coding: utf-8
-import collections
-
-from sandbox.life_game.simulation import Cell
-from sandbox.life_game.simulation import Empty
-from sandbox.life_game.utils import get_subjects_from_str_representation
-from synergine2.config import Config
-from synergine2.cycle import CycleManager
-from synergine2.log import SynergineLogger
-from synergine2_xyz.simulation import XYZSimulation
-from synergine2_xyz.subjects import XYZSubjects
-from synergine2_xyz.utils import get_str_representation_from_positions
-from tests import BaseTest
-from tests import str_kwargs
-
-
-class LifeGameBaseTest(BaseTest):
-    def _get_str_representation_of_subjects(self, subjects: list):
-        items_positions = collections.defaultdict(list)
-
-        for subject in subjects:
-            if type(subject) == Cell:
-                items_positions['1'].append(subject.position)
-            if type(subject) == Empty:
-                items_positions['0'].append(subject.position)
-
-        return get_str_representation_from_positions(
-            items_positions,
-            complete_lines_with='0',
-            **str_kwargs
-        )
-
-
-class TestSimpleSimulation(LifeGameBaseTest):
-    def test_cycles_evolution(self):
-        simulation = XYZSimulation(Config())
-        subjects = self._get_subjects(simulation)
-        simulation.subjects = subjects
-
-        cycle_manager = CycleManager(
-            Config(),
-            SynergineLogger('test'),
-            simulation=simulation,
-        )
-
-        assert """
-            0 0 0 0 0
-            0 1 1 1 0
-            0 0 0 0 0
-        """ == self._get_str_representation_of_subjects(
-            subjects,
-        )
-
-        cycle_manager.next()
-
-        assert """
-            0 0 0 0 0
-            0 0 1 0 0
-            0 0 1 0 0
-            0 0 1 0 0
-            0 0 0 0 0
-        """ == self._get_str_representation_of_subjects(
-            subjects,
-        )
-
-        cycle_manager.next()
-
-        assert """
-            0 0 0 0 0
-            0 0 0 0 0
-            0 1 1 1 0
-            0 0 0 0 0
-            0 0 0 0 0
-        """ == self._get_str_representation_of_subjects(
-            subjects,
-        )
-
-    def _get_subjects(self, simulation: XYZSimulation):
-        cells = XYZSubjects(simulation=simulation)
-
-        for position in [
-            (-1, 0, 0),
-            (0, 0, 0),
-            (1, 0, 0),
-        ]:
-            cells.append(Cell(
-                Config(),
-                simulation=simulation,
-                position=position,
-            ))
-
-        for position in [
-            (-2, -1, 0),
-            (-1, -1, 0),
-            (0, -1, 0),
-            (1, -1, 0),
-            (2, -1, 0),
-            (-2, 0, 0),
-            (2, 0, 0),
-            (-2, 1, 0),
-            (-1, 1, 0),
-            (0, 1, 0),
-            (1, 1, 0),
-            (2, 1, 0),
-        ]:
-            cells.append(Empty(
-                Config(),
-                simulation=simulation,
-                position=position,
-            ))
-        return cells
-
-
-class TestMultipleSimulations(LifeGameBaseTest):
-    def test_cross(self):
-        str_representations = [
-            """
-            0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 1 0 0 1 0 0 0 0
-            0 1 1 1 0 0 1 1 1 0 0
-            0 1 0 0 0 0 0 0 1 0 0
-            0 1 0 0 0 0 0 0 1 0 0
-            0 1 1 1 0 0 1 1 1 0 0
-            0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0
-        """,
-            """
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 0 1 1 0 0 0 0 0
-            0 0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 1 0 1 0 0 1 0 1 0 0
-            0 1 1 0 0 0 0 0 0 1 1 0
-            0 1 1 0 0 0 0 0 0 1 1 0
-            0 0 1 0 1 0 0 1 0 1 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 0 0 1 1 0 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-        """,
-            """
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 1 1 0 0 1 1 0 0 0
-            0 1 1 1 0 0 0 0 1 1 1 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 1 1 1 0 0 0 0 1 1 1 0
-            0 0 0 1 1 0 0 1 1 0 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-        """,
-            """
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 1 1 1 0 0 1 1 1 0 0
-            0 0 1 0 0 0 0 0 0 1 0 0
-            0 0 1 0 0 0 0 0 0 1 0 0
-            0 0 1 1 1 0 0 1 1 1 0 0
-            0 0 0 0 1 0 0 1 0 0 0 0
-            0 0 0 0 1 1 1 1 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-            0 0 0 0 0 0 0 0 0 0 0 0
-        """,
-        ]
-
-        simulation = XYZSimulation(Config())
-        subjects = get_subjects_from_str_representation(
-            str_representations[0],
-            simulation,
-        )
-        simulation.subjects = subjects
-
-        cycle_manager = CycleManager(
-            config=Config(),
-            logger=SynergineLogger('test'),
-            simulation=simulation,
-        )
-
-        for str_representation in str_representations:
-            assert str_representation == \
-               self._get_str_representation_of_subjects(
-                    subjects,
-               )
-            cycle_manager.next()

tests/test_processing.py  (+5 -1)

     @pytest.mark.timeout(10)
     def make_job_with_scalar(
             self,
+            worker_id: int,
+            process_count: int,
             data: list,
     ):
         result = sum(data)
@@ ... @@
     @pytest.mark.timeout(10)
     def make_job_with_object(
             self,
+            worker_id: int,
+            process_count: int,
             data: list,
     ):
         data = [o.value for o in data]
@@ ... @@

         shared.set('foo_1', 0)

-        def job(key):
+        def job(worker_id, processes_count, key):
             shared.refresh()
             value = shared.get('foo_{}'.format(key)) or 0
             return value + 1

tests/test_share.py  (+34 -0)

 # coding: utf-8
+import pickle
+
 import pytest

 from synergine2.exceptions import UnknownSharedData
@@ ... @@
         foo.data['foo'] = 'buz'
         assert shared.get('data') == {'foo': 'buz'}

+        shared.commit()
+        assert shared.get('data') == {'foo': 'buz'}
+        assert pickle.loads(shared._r.get('data')) == {'foo': 'buz'}
+
+        foo.data['foo'] = 'bAz'
+        shared.commit()
+        assert shared.get('data') == {'foo': 'bAz'}
+        assert pickle.loads(shared._r.get('data')) == {'foo': 'bAz'}
+
+    def test_update_list_with_pointer(self):
+        shared = share.SharedDataManager()
+
+        class Foo(object):
+            data = shared.create('data', [])
+
+        foo = Foo()
+        foo.data = ['foo']
+
+        assert shared.get('data') == ['foo']
+
+        foo.data.append('bar')
+        assert shared.get('data') == ['foo', 'bar']
+
+        shared.commit()
+        assert shared.get('data') == ['foo', 'bar']
+        assert pickle.loads(shared._r.get('data')) == ['foo', 'bar']
+
+        foo.data.append('bAr')
+        shared.commit()
+        assert shared.get('data') == ['foo', 'bar', 'bAr']
+        assert pickle.loads(shared._r.get('data')) == ['foo', 'bar', 'bAr']
+
     def test_refresh_without_commit(self):
         shared = share.SharedDataManager()