Add AggregateEvents, and demonstrate that they work (silly-ly.)
Cat's Eye Technologies
9 years ago
    def initiator(self):
        """Return the first participant, treated as this event's initiator."""
        return self.participants[0]
54 | 54 | |
55 | def __str__(self): | |
55 | def render(self): | |
56 | 56 | phrase = self.phrase |
57 | 57 | i = 0 |
58 | 58 | for participant in self.participants: |
64 | 64 | phrase = phrase.replace('<was-%d>' % (i + 1), participant.was()) |
65 | 65 | phrase = phrase.replace('<is-%d>' % (i + 1), participant.is_()) |
66 | 66 | i = i + 1 |
67 | return phrase | |
68 | ||
69 | def __str__(self): | |
70 | phrase = self.render() | |
71 | if self.excl: | |
72 | phrase = phrase + '!' | |
73 | else: | |
74 | phrase = phrase + '.' | |
75 | return phrase[0].upper() + phrase[1:] | |
76 | ||
77 | ||
78 | class AggregateEvent(Event): | |
79 | """Attempt at a way to combine multiple events into a single | |
80 | sentence. Each constituent event must have the same initiator. | |
81 | ||
82 | """ | |
83 | def __init__(self, template, events, excl=False): | |
84 | self.template = template | |
85 | self.events = events | |
86 | self.excl = excl | |
87 | self.phrase = 'SEE SUBEVENTS PLZ' | |
88 | self._initiator = self.events[0].initiator() | |
89 | for event in self.events: | |
90 | assert event.initiator() == self._initiator | |
91 | self.location = self._initiator.location | |
92 | ||
93 | def rephrase(self, new_phrase): | |
94 | #raise NotImplementedError | |
95 | return self | |
96 | ||
    def initiator(self):
        """Return the initiator shared by all constituent events."""
        return self._initiator
99 | ||
100 | def __str__(self): | |
101 | phrase = self.template % tuple([x.render() for x in self.events]) | |
67 | 102 | if self.excl: |
68 | 103 | phrase = phrase + '!' |
69 | 104 | else: |
160 | 195 | |
161 | 196 | # update our idea of where the character is, even if these are |
162 | 197 | # not events we will be dumping out |
163 | self.character_location[event.participants[0]] = event.location | |
198 | self.character_location[event.initiator()] = event.location | |
164 | 199 | |
165 | 200 | if event.location == self.character_location[pov_actor]: |
166 | 201 | paragraph_events.append(event) |
167 | 202 | # update the reader's idea of where the character is |
168 | self.last_seen_at[event.participants[0]] = event.location | |
203 | self.last_seen_at[event.initiator()] = event.location | |
169 | 204 | |
170 | 205 | return paragraph_events |
171 | 206 | |
193 | 228 | while consume_another_event and incoming_events: |
194 | 229 | consume_another_event = False |
195 | 230 | event = incoming_events.pop() |
196 | last_character = events[-1].participants[0] | |
197 | if event.participants[0] == last_character: | |
198 | ||
231 | last_character = events[-1].initiator() | |
232 | if event.initiator() == last_character: | |
233 | ||
199 | 234 | # replace repeated proper nouns with pronouns |
200 | 235 | if event.phrase.startswith('<1>'): |
201 | 236 | event.phrase = '<he-1>' + event.phrase[3:] |
251 | 286 | def publish_chapter(self, chapter_num): |
252 | 287 | collector = EventCollector() |
253 | 288 | |
254 | for actor in self.characters: | |
255 | actor.collector = collector | |
289 | for character in self.characters: | |
290 | character.collector = collector | |
256 | 291 | # don't continue a conversation from the previous chapter, please |
257 | actor.topic = None | |
258 | actor.place_in(random.choice(self.setting)) | |
292 | character.topic = None | |
293 | character.place_in(random.choice(self.setting)) | |
294 | ||
295 | # just testing | |
296 | for character in self.characters: | |
297 | character.collector.collect(AggregateEvent( | |
298 | "%s, then %s", | |
299 | [ | |
300 | Event("<1> looked at <his-1> shoes", [character]), | |
301 | Event("<1> looked at the sky", [character]), | |
302 | ], | |
303 | excl=True)) | |
259 | 304 | |
260 | 305 | while len(collector.events) < self.events_per_chapter: |
261 | for actor in self.characters: | |
262 | actor.live() | |
306 | for character in self.characters: | |
307 | character.live() | |
263 | 308 | #print len(collector.events) # , repr([str(e) for e in collector.events]) |
264 | 309 | |
265 | 310 | # this contains duplicates because we are producing duplicates in |