22 """dependence tracking using a directed acyclic graph (DAG)"""
import os
import sys
import platform
import types
import collections
try:
    import cPickle as apickle
except ImportError:
    import pickle as apickle


from base import *
from work_queue import *
from env import *
from util import *
from plan import *
import scanner
import dfs
import util

91 """This stores the basic dependence structure for the
92 build. Creators are listed for files that are generated. The
93 signature is the last signature we saw for this."""
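    # How the signatures below are used:
    #   old_signature - content hash loaded from the previous build's
    #                   signature file (see dag_t._read_signatures).
    #   signature     - content hash of the file as it exists right now.
    #   changed       - tri-state cache: None until computed, then the
    #                   True/False result of _compute_changed_bit().
    # A file counts as changed when it is missing, has no old signature,
    # or its old and new signatures differ.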
    def __init__(self, file_name, creator=None):
        self.file_name = file_name

        # Content signatures: the one recorded by the previous build and
        # the one computed for the current file contents.
        self.old_signature = None
        self.signature = None
        self.scanned_header = False

        # Signatures for the command that creates this file (if any).
        self.old_command_signature = None
        self.command_signature = None

        # True once this file has been scanned for header dependences.
        self.scanned = False

        # Scheduling state used by the ready/leaf scans.
        self.needs_to_run = False
        self.visited = False
        self.added = False

        # True if this file is needed for the requested targets.
        self.required = False

        self.changed = None

        self.creator = creator
        self.files_that_are_inputs = []
        self.files_that_depend_on_this = []

        # Bookkeeping for the strongly-connected-component (loop) search.
        self.part_of_loop = False
        self.index = 0
        self.lowlink = 0

        self.hash_file()

    def hash_file(self):
        if os.path.exists(self.file_name):
            try:
                lines = open(self.file_name).readlines()
            except:
                die("COULD NOT READ: %s" % (str(self.file_name)))
            self.signature = hash_list(lines)
            if verbose(99):
                msgb("HASHFILE", "%s -> %s" % (self.signature,
                                               self.file_name))
        else:
            if verbose(99):
                msgb("COULD NOT HASH MISSING FILE", self.file_name)

    def hash_if_needed(self):
        if self.signature is None:
            self.hash_file()

    def missing(self):
        if not os.path.exists(self.file_name):
            return True
        return False

    def _check_required(self, required_files):
        if self.file_name in required_files:
            return True
        if os.path.basename(self.file_name) in required_files:
            return True
        return False

168 """Return True if there is no old signature or the old
169 signature does not equal the current signature, or the file
170 does not exist"""
171 if self.missing():
172
173
174
175
176 if verbose(10):
177 msgb("MISSING FILE", self.file_name)
178 if self._check_required(required_files):
179 if verbose(10):
180 msgb("MISSING REQUIRED FILE->CHANGED")
181 return True
182
183 if self.scanned_header:
184 if verbose(10):
185 msgb("MISSING SCANNED HEADER FILE->UNCHANGED")
186 return False
187 if verbose(10):
188 msgb("MISSING FILE->ASSUME CHANGED")
189 return True
190 else:
191 if self.old_signature:
192 self.hash_if_needed()
193 if self.old_signature == self.signature:
194 return False
195 elif verbose(10):
196 msgb("SIG MISMATCH for %s" % self.file_name)
197 msgb("OLD SIG %s" % str(self.old_signature))
198 msgb("NEW SIG %s" % str(self.signature))
199 elif verbose(10):
200 msgb("NO OLD SIG for %s" % self.file_name)
201 return True
202
204 """Compute changed bit if it has not been computed yet. Return
205 the value."""
206 if self.changed == None:
207 self.changed = self._compute_changed_bit(required_files)
208 if verbose(10):
209 msgb("COMPUTE CHANGE BIT", "%s for %s" %
210 ( str(self.changed), self.file_name))
211 return self.changed
212
213
217
219
220 s = "\tANCESTORS: %s\nTARGET: %s\n\tDESCENDENTS: %s\n" % \
221 (self.format_string(self.files_that_are_inputs),
222 self.file_name,
223 self.format_string(self.files_that_depend_on_this))
224
225 if self.creator:
226 s += "\tCREATOR: %s\n" % self.creator.dump()
227 if self.visited:
228 s += "\tVISITED\n"
229 else:
230 s += "\tNOT-VISITED\n"
231
232 if self.part_of_loop:
233 s += "\tIN-LOOP\n"
234 else:
235 s += "\tNOT-IN-LOOP\n"
236
237 if self.required:
238 s += "\tREQUIRED\n"
239 else:
240 s += "\tNOT-REQUIRED\n"
241
242 if self.changed:
243 s += "\tCHANGED\n"
244 else:
245 s += "\tCHANGED\n"
246 return s
247
249 """print a string representing this node of the DAG. The
250 string comes from the __str__ function"""
251 print self.dump_str()
253 return self.dump_str()
254
257 self.signature = signature
258
259
261 """
262 This object builds a DAG of files an sequences their submission to
263 the parallel work queue of type L{work_queue_t}.
264
265 This takes L{plan_t} objects representing command
266 strings or python functions, and creates L{command_t}
267 objects suitable for use in the L{work_queue_t}.
268
269 As the L{work_queue_t} executes, it queries this DAG for more
270 ready commands or functions to execute.
271 """
272
273
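    # Sketch of the intended flow, as described by the docstrings here
    # and the "(called from work_queue.py)" notes below:
    #   1. A build script calls add() once per build step, passing an
    #      env_t and a plan_t (or an equivalent dict).
    #   2. The work queue asks the DAG for the leaf records whose
    #      commands are ready to run.
    #   3. As each command completes, the work queue hands it back via
    #      enable_successors(), which returns the newly ready commands.
    #   4. File and command signatures are pickled to
    #      ".mbuild.hash.<name>" so the next build can skip unchanged
    #      work.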
    def __init__(self, name='default', env=None):
        self.name = name
        self.recs = {}

        # Signatures from the previous build, keyed by file name or
        # command dag-key, loaded from the signature file.
        self.old_signatures = {}

        self._python_commands_changed = False

        self.signature_file_name = ".mbuild.hash." + self.name
        if env:
            self.signature_file_name = env.build_dir_join(
                self.signature_file_name)

        self.signature_file_name = os.path.abspath(self.signature_file_name)

        if os.path.exists(self.signature_file_name):
            self._read_signatures(self.signature_file_name)

        if env and 'required' in env:
            self.required_set = \
                set(self._canonize_if_exists_fn(env['required']))
        else:
            self.required_set = set()

306 """Check the DAG for illegal cycles in the include structure.
307 @rtype: bool
308 @return: True if the DAG contains cycles (and thus is not a DAG).
309 """
310 node_dict = {}
311
312 for k,v in self.recs.iteritems():
313 if k in node_dict:
314 node = node_dict[k]
315 else:
316 node = dfs.node_t(k)
317 node_dict[k] = node
318 for p in v.files_that_are_inputs:
319 if p in node_dict:
320 pnode = node_dict[p]
321 else:
322 pnode = dfs.node_t(p)
323 node_dict[p] = pnode
324 node.add_successor(pnode)
325
326 cycle = dfs.dfs(node_dict.values())
327 if cycle:
328 msgb("CYCLE DETECTED IN DAG")
329 return cycle
330
333
335 """print a string representing the DAG. """
336 print "DAG DUMP"
337 for v in self.recs.itervalues():
338 v.dump()
339
341
342 if isinstance(l, types.ListType):
343 il = l
344 else:
345 il = [l]
346 s = []
347 for i in il:
348 if i.is_command_line():
349 s.append(i.command)
350 t = " - ".join(s)
351 h = hash_string(t)
352 return h
353
355 """Write a dictionary of _mbuild_storage_object_t's to the
356 given file name"""
357 if verbose(10):
358 msgb("WRITING SIGNATURES", self.signature_file_name)
359 d = {}
360 for (k,v) in self.recs.iteritems():
361
362
363 if v.creator:
364 if v.creator.is_command_line() and v.creator.completed:
365
366
367 command_hash = v.creator.hash()
368 full_key = v.creator.dagkey()
369 d[full_key]= _mbuild_storage_object_t(command_hash)
370 if verbose(99):
371 msgb("SIGWRITE", "%s -> %s" % (str(command_hash),
372 full_key))
373 if v.creator.completed and v.creator.exit_status == 0:
374 v.hash_file()
375 if v.creator and v.creator.failed():
376 if verbose(99):
377 msgb("NULLIFY SIG", k)
378 v.signature = None
379 if not v.signature:
380 if verbose(99):
381 msgb("FIXING NULL SIGNATURE", k)
382 v.hash_file()
383
384 if verbose(99):
385 msgb("SIGWRITE", "%s -> %s" % (str(v.signature),k))
386 d[k] = _mbuild_storage_object_t(v.signature)
387
388
389
390 f = open(self.signature_file_name,"wb")
391 apickle.dump(d,f)
392 f.close()
393
395 """Return True if the signature matches the command object."""
396
397
398
399 if co.has_python_subcommand():
400 if self._python_commands_changed:
401 return False
402 else:
403 return True
404
405 full_key = co.dagkey()
406 try:
407 old_hash = self.old_signatures[full_key].signature
408 if verbose(99):
409 msgb('COMMAND HASH', full_key)
410 msgb('COMMAND HASH', old_hash)
411 new_hash = co.hash()
412 if old_hash == new_hash:
413 if verbose(99):
414 msgb('COMMAND HASH','\tMATCH')
415
416 return True
417 except:
418 if verbose(99):
419 msgb('COMMAND HASH','\tNO OLD HASH')
420
421 if verbose(99):
422 msgb('COMMAND HASH','\tDOES NOT MATCH')
423 return False
424
425
427 """Read a dictionary of _mbuild_storage_object_t's from the
428 given file name."""
429 if verbose(10):
430 msgb("READING SIGNATURES", file_name)
431 try:
432 f = open(file_name,"rb")
433 self.old_signatures = apickle.load(f)
434 f.close()
435 except:
436 warn("READING SIGNATURES FAILED FOR "+ file_name)
437 return
438 if verbose(99):
439 for k, v in self.old_signatures.iteritems():
440 msgb("SIGREAD", "%s -> %s" % (str(v.signature),k))
441
442
443 for k, v in self.recs.iteritems():
444 if k in self.old_signatures:
445 v.old_signature = self.old_signatures[k].signature
446
    def _check_required_file(self, fn):
        if fn in self.required_set:
            return True
        if os.path.basename(fn) in self.required_set:
            return True
        return False

456 """Returns (all_parents_visited, some_parents_changed)"""
457 all_parents_visited = True
458 some_parents_changed = False
459 for ancestor_fn in n.files_that_are_inputs:
460 try:
461 ancestor_rec = self.recs[ancestor_fn]
462 if ancestor_rec.visited:
463 if ancestor_rec.changed:
464 some_parents_changed = True
465 else:
466 all_parents_visited = False
467 except:
468 if self._check_required_file(ancestor_fn):
469 warn("[1] node %s: did not find ancestor node: %s" %
470 (n.file_name, ancestor_fn))
471
472 return (all_parents_visited, some_parents_changed)
473
475 """Returns True if some parent changed"""
476 for ancestor_fn in n.files_that_are_inputs:
477 try:
478 ancestor_rec = self.recs[ancestor_fn]
479 if ancestor_rec.visited:
480 if ancestor_rec.changed:
481 return True
482 except:
483 if self._check_required_file(ancestor_fn):
484 warn("[2] node %s: did not find ancestor node: %s" %
485 (n.file_name, ancestor_fn))
486 return False
487
488
490 """Returns True if all parents were visited or parents are part of a loop"""
491 for ancestor_fn in n.files_that_are_inputs:
492 try:
493 ancestor_rec = self.recs[ancestor_fn]
494 if not ancestor_rec.visited:
495 if verbose(10):
496 msgb("PARENT UNVISITED", "%s <- %s" %
497 (n.file_name, ancestor_fn))
498 if n.part_of_loop:
499 warn("Circularity involving %s" % (n.file_name))
500 return True
501 return False
502 except:
503 if self._check_required_file(ancestor_fn):
504 warn("[3] node %s: did not find ancestor node: %s" %
505 (n.file_name, ancestor_fn))
506 return True
507
509 """Returns True if all parents that have to execute have completed"""
510 for ancestor_fn in n.files_that_are_inputs:
511 try:
512 ancestor_rec = self.recs[ancestor_fn]
513 if ancestor_rec.creator:
514 if not ancestor_rec.creator.completed:
515 return False
516 except:
517 if self._check_required_file(ancestor_fn):
518 warn("[4] node %s: did not find ancestor node: %s" %
519 (n.file_name, ancestor_fn))
520 return True
521
523 """Set all the ancestors of the files in the list of files lof
524 argument to be required nodes."""
525 nodes = collections.deque()
526 for f in lof:
527 nodes.append(f)
528
529 while len(nodes) != 0:
530 f = nodes.popleft()
531 r = self.recs[f]
532 if not r.required:
533 if verbose(10):
534 msgb("MARKING-ANCESTORS AS REQUIRED", r.file_name)
535
536 r.required = True
537 for g in r.files_that_are_inputs:
538 nodes.append(g)
539
541 """Look at the targets list and mark the ancestors as
542 required for the build. Internal function"""
543 if verbose(10):
544 msgb("INPUT TARGETS", str(targets))
545 for v in self.recs.itervalues():
546 v.required = False
547
548 target_dictionary = dict.fromkeys(targets, True)
549 if verbose(10):
550 msgb("TARGETS", str(target_dictionary))
551 for v in self.recs.itervalues():
552 if v.creator:
553 if v.file_name in target_dictionary:
554 if not v.required:
555 if verbose(10):
556 msgb("MARK AS REQUIRED", v.file_name)
557 v.required = True
558 self._set_ancestors_to_required(v.files_that_are_inputs)
559
561 """Return a list of things that did not build but were tagged
562 as required for the build. This list could be nonempty because
563 (1)there was an error in the build or (2) there is a
564 circularity in the dependence structure."""
565 did_not_build = []
566 for v in self.recs.itervalues():
567 if v.required and not v.visited:
568 did_not_build.append(v.file_name)
569 return did_not_build
570
572
573
        def _mark_loop(level, n, stack, all_sccs):
            # Recursive Tarjan visit over the files-that-depend-on-this
            # (successor) edges.
            n.index = level
            n.lowlink = level
            level = level + 1
            stack.append(n)

            for cfn in n.files_that_depend_on_this:
                child = self.recs[cfn]
                if child.index == 0:
                    _mark_loop(level, child, stack, all_sccs)
                    n.lowlink = min(n.lowlink, child.lowlink)
                elif child in stack:
                    n.lowlink = min(n.lowlink, child.index)

            if n.lowlink == n.index:
                # n is the root of a strongly connected component; pop
                # the component off the stack.
                scc = []
                while True:
                    child = stack.pop()
                    scc.append(child)
                    if child == n:
                        break
                all_sccs.append(scc)

        stack = collections.deque()
        all_sccs = []
        level = 1

        for v in root_nodes:
            _mark_loop(level, v, stack, all_sccs)

        # Any component with more than one member is a cycle; mark its
        # members so the scheduler does not deadlock waiting on them.
        for scc in all_sccs:
            if len(scc) > 1:
                msg("===================================")
                msg("CYCLE INVOLVING THESE FILES (will assume all ready):")
                for n in scc:
                    msg("\t" + n.file_name)
                    n.part_of_loop = True
                msg("===================================")

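    # Example of what _find_loops() catches: if generated header a.h lists
    # b.h as an input and generated header b.h lists a.h, the two records
    # form a strongly connected component, both get part_of_loop set, and
    # the root scan below treats them as ready instead of deadlocking.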
619 """Return a list of mbuild_dep_records_t for things with no
620 ancestors but with associated commands. targets is an optional
621 list of things to build. (called from work_queue.py)
622 """
623 nodes = collections.deque()
624
625 if targets:
626 if not isinstance(targets, types.ListType):
627 targets = [ targets ]
628 self._find_required_nodes(targets)
629 else:
630
631 for v in self.recs.itervalues():
632 v.required = True
633
634 self._find_loops(self.recs.itervalues())
635
636
637
638 for v in self.recs.itervalues():
639 v.visited = False
640 v.added = False
641 if (v.part_of_loop or len(v.files_that_are_inputs) == 0) and v.required:
642 v.needs_to_run = v.change_bit(self.required_set)
643 v.added = True
644 nodes.append(v)
645
646 if verbose(9):
647 if v.needs_to_run:
648 s = ": CHANGED"
649 else:
650 s = ''
651 msgb("ROOTSEARCH", v.file_name + s)
652 else:
653 v.needs_to_run = False
654
655 ready = self._ready_scan(nodes)
656 del nodes
657 return ready
658
660 """When a command completes, it must notify things that
661 depend on its stated target files. Return a list of ready
662 commands (called from work_queue.py)
663 """
664 if verbose(10):
665 msgb('ENABLE SUCCESSORS', str(cmd))
666 nodes = collections.deque()
667 for tgt in cmd.targets:
668 rtgt = os.path.realpath(tgt)
669 if verbose(11):
670 msgb('SUCCESSOR', tgt + " --> " + rtgt)
671 n = self.recs[ rtgt ]
672 self._scan_successors(nodes,n)
673 ready = self._ready_scan(nodes)
674 if verbose(10):
675 msgb("NEW READY VALUES", str(ready))
676 del nodes
677 return ready
678
680 """Add ready successors of n to nodes list"""
681 if verbose(10):
682 msgb('SCAN SUCCESSORS', n.file_name + " -> " +
683 str(n.files_that_depend_on_this))
684 for successor_fn in n.files_that_depend_on_this:
685 try:
686 successor_rec = self.recs[successor_fn]
687 if successor_rec.required and not successor_rec.needs_to_run:
688 if self._just_compute_all_parents_visited(successor_rec):
689 if self._just_compute_all_parents_completed(successor_rec):
690 if verbose(10):
691 msgb("LEAFSEARCH", "\tADDING: " +
692 successor_rec.file_name)
693
694
695 if successor_rec.added:
696 warn("Already added: " + successor_rec.file_name)
697 else:
698 successor_rec.added = True
699 successor_rec.needs_to_run = True
700 nodes.append(successor_rec)
701 else:
702 if verbose(10):
703 msgb("NOT ALL PARENTS COMPLETED", successor_fn)
704 else:
705 if verbose(10):
706 msgb("NOT ALL PARENTS VISITED", successor_fn)
707 else:
708 if verbose(10):
709 msgb("NOT REQUIRED/NOT NEEDED TO RUN", successor_fn)
710
711 except:
712 warn("node %s: did not find child node: %s" %
713 (n.file_name, successor_fn))
714 if verbose(10):
715 msgb('SCAN SUCCESSORS DONE')
716
718 """Return True if all the outputs of the command are visited
719 and unchanged. If any are not visited or any are changed,
720 return False."""
721 if not cmd.targets:
722 return True
723 for fn in cmd.targets:
724 rfn = os.path.realpath(fn)
725 vmsgb(20,"TESTING CMD TARGET:", rfn, pad = 4*' ')
726 if rfn in self.recs:
727 d = self.recs[rfn]
728 if d.visited == False:
729 vmsgb(20,"CMD TARGET NOT VISITED YET:", fn, pad=8*' ')
730 return False
731 if d.changed:
732 vmsgb(20,"CMD TARGET CHANGED:", fn, pad=8*' ')
733 return False
734 else:
735 vmsgb(20,"CMD TARGET NOT FOUND IN DAG:", fn, pad=8*' ')
736 vmsgb(20,"CMD TARGETS ALL VISITED AND UNCHANGED:", fn)
737 return True
738
740 """Process the nodes list and return a list of ready commands"""
741 vmsgb(20,'READY SCAN', '%d' % (len(nodes)))
742 readyd = dict()
743 vmsgb(20,"READYD0", str(readyd))
744
745
746
747 iters = 0
748 while len(nodes) != 0:
749 n = nodes.popleft()
750 iters+=1
751
752 parents_changed = self._just_compute_parent_changed(n)
753 vmsgb(20,"VISITING", n.file_name)
754 n.visited = True
755 if n.change_bit(self.required_set):
756 vmsgb(20,"LEAFSEARCH", "%d \tthis node %s CHANGED." %
757 (iters,n.file_name))
758 propagate_changed = True
759 n.needs_to_run = True
760 elif parents_changed:
761 vmsgb(20,"LEAFSEARCH", "%d \tsome parent of %s CHANGED." %
762 (iters,n.file_name))
763 n.changed = True
764 propagate_changed = True
765 n.needs_to_run = True
766 elif n.creator and \
767 not self._check_command_signature(n.creator):
768 vmsgb(20,"LEAFSEARCH", "%d\tthis node's command changed: %s." %
769 (iters,n.file_name))
770 n.changed = True
771 propagate_changed = True
772 n.needs_to_run = True
773 else:
774 vmsgb(20,"LEAFSEARCH", "%d\tUNCHANGED: %s." %
775 (iters,n.file_name))
776 propagate_changed = False
777
778 if n.creator:
779
780
781
782
783
784
785
786 if not propagate_changed:
787 vmsgb(20,"LEAFSEARCH", "\tTESTING CMD SUCCESSORS: " +
788 n.file_name)
789 if self._cmd_all_outputs_visited_and_unchanged(n.creator):
790 n.creator._complete()
791 vmsgb(20,"LEAFSEARCH", "\tMARK CREATOR CMD COMPLETED: " +
792 n.file_name)
793 else:
794 vmsgb(20,"LEAFSEARCH", "\tCMD OUTPUTS NOT FULLY SCANNED YET: " +
795 n.file_name)
796
797 else:
798 if n.creator._ready():
799 vmsgb(20,"LEAFSEARCH", "\tCMD READY: " + n.file_name)
800 if n.file_name not in readyd:
801 vmsgb(20,"LEAFSEARCH",
802 "\tADDING CREATOR TO READYD: " +
803 n.file_name)
804 readyd[n.file_name] = n
805 else:
806 vmsgb(20,"LEAFSEARCH",
807 "\tCREATOR ALREADY IN READYD: " +
808 n.file_name)
809
810 self._scan_successors(nodes,n)
811 vmsgb(20,"READYD", str(readyd))
812 ready = readyd.values()
813 return ready
814
    def _find_rec_for_missing_file(self, fn, assumed_directory):
        vmsgb(20, "LOOKING FOR MISSING FILE", "%s assuming %s" %
              (fn, assumed_directory))

        if fn in self.recs:
            vmsgb(20, "FOUND DEP REC FOR MISSING FILE", fn)
            return self.recs[fn]
        if assumed_directory:
            nfn = util.join(assumed_directory, fn)
            if nfn in self.recs:
                vmsgb(20, "FOUND DEP REC FOR MISSING FILE(2)", nfn)
                return self.recs[nfn]
            nfn = os.path.realpath(nfn)
            if nfn in self.recs:
                vmsgb(20, "FOUND DEP REC FOR MISSING FILE(3)", nfn)
                return self.recs[nfn]
        vmsgb(20, "NO DEP REC FOR MISSING FILE", fn)
        return None

    def _make_list(self, x):
        """Make a list from a single object if the thing is not
        already a list. If it is a list, just return the list."""
        if isinstance(x, types.ListType):
            return x
        return [x]

842 """Scan xinput for headers. Add those headers to the list of
843 files that are inputs."""
844 to_scan = collections.deque()
845 to_scan.append(xinput)
846
847
848 while len(to_scan) != 0:
849 fn = to_scan.popleft()
850
851
852 r = self._check_add_dep_rec(fn)
853
854
855
856 if r.scanned:
857 continue
858
859
860
861 headers = scanner.mbuild_scan(fn, header_paths)
862 if verbose(2):
863 for hr in headers:
864 if hr.system:
865 sys="System "
866 else:
867 sys="NotSystem"
868 if hr.found:
869 fnd="Found "
870 else:
871 fnd="Missing"
872 msgb('HDR',"%s| %s| %s" %
873 ( sys, fnd, hr.file_name) )
874
875 r.scanned = True
876
877 for hr in headers:
878
879
880 if not hr.system:
881 scanned_header = True
882 if not hr.found:
883
884
885
886 ah = self._find_rec_for_missing_file(hr.file_name, assumed_directory)
887 if ah:
888 if verbose(2):
889 msgb("FOUND DEP REC FOR MISSING HEADER. WE WILL BUILD IT")
890 hr.file_name = ah.file_name
891 scanned_header = False
892 elif not self._check_required_file(hr.file_name):
893 if verbose(2):
894 msgb("MISSING HEADER NOT REQUIRED")
895 continue
896 elif assumed_directory:
897 ofn = hr.file_name
898 hr.file_name = util.join(assumed_directory, ofn)
899 if verbose(2):
900 msgb("ASSUMING",
901 "%s is in %s" % (ofn, assumed_directory))
902
903
904
905 hr.file_name = os.path.realpath(hr.file_name)
906
907
908 r.files_that_are_inputs.append(hr.file_name)
909 hdr_node = self._check_add_dep_rec(hr.file_name)
910 hdr_node.scanned_header = scanned_header
911 hdr_node.files_that_depend_on_this.append(fn)
912
913 if not hdr_node.scanned:
914 to_scan.append(hr.file_name)
915
916
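    # Net effect of _scan_headers(): every non-system header pulled in by
    # an input becomes an edge in the DAG. A missing header is resolved in
    # this order: (1) a record already registered as some command's output
    # (it will be generated), (2) dropped if it is not in the required
    # set, (3) otherwise assumed to live in assumed_directory (the env's
    # gen_dir) and tracked there.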
    def _make_dep_record(self, file_name, creator=None):
        if verbose(10):
            msgb("MKDEP", file_name)
        r = _mbuild_dep_record_t(file_name, creator)
        if file_name in self.old_signatures:
            r.old_signature = self.old_signatures[file_name].signature
        return r

926 """Look to see if the file exists in our list of dependence
927 records. If not, add it. Return the found or created
928 record."""
929
930 rfn = os.path.realpath(fn)
931
932 if rfn not in self.recs:
933 r = self._make_dep_record(rfn, creator)
934 self.recs[rfn] = r
935 else:
936 r = self.recs[rfn]
937 return r
938
942
        r = self._check_add_dep_rec(output)
        self.required_set.add(r.file_name)
        if creator is not None:
            if r.creator:
                die("Two commands create " + output)
            r.creator = creator
            r.files_that_are_inputs.extend(creator.inputs)

953 """Produce a command_t to add to the workqueue or for
954 connecting to other commands by dependence chains"""
955 if d.env:
956
957 c = command_t( d.command, d.args, d.env )
958 elif d.args:
959 c = command_t( d.command, d.args)
960 else:
961 c = command_t( d.command )
962 if d.input:
963 c.inputs = self._make_list( d.input)
964 if d.output:
965 c.targets = self._make_list( d.output)
966
967 if hasattr(d,'name'):
968 c.name = d.name
969 return c
970
972 """We just added a new command c. Now we must make sure that
973 the commands that create this command's inputs come before
974 this command. Also the commands that use this command's output
975 output files as inputs come after it. Not all the commands may
976 be known yet, but by working symmetrically here, we'll get
977 them all eventually."""
978
979
980
981 for xinput in c.inputs:
982 try:
983 t = self.recs[xinput]
984 if t.creator:
985 if verbose(10):
986 msgb("CMD IDEP", xinput + " -> " + str(c.targets))
987 t.creator.add_after_me(c)
988 except:
989 pass
990
991
992
993
994 for output in c.targets:
995
996 if output not in self.recs:
997 die("Missing command for target " + output)
998 t = self.recs[output]
999 for f in t.files_that_depend_on_this:
1000 if f in self.recs:
1001 u = self.recs[f]
1002 if u.creator:
1003 if verbose(10):
1004 msgb("ODEP", output + ' -> ' +
1005 str(u.creator.targets))
1006 u.creator.add_before_me(c)
1007
1008
1010 """Return a list of L{command_t}'s that were executed for
1011 analysis of the build. If a command was not executed, it is
1012 not returned.
1013
1014 @rtype: list
1015 @return: A list of L{command_t} objects.
1016 """
1017 executed_commands = []
1018 for r in self.recs.itervalues():
1019 if r.creator:
1020 if r.creator.completed:
1021 executed_commands.append(r.creator)
1022 return executed_commands
1023
1024
    def add(self, env, d):
        """Create a command based on the input dictionary or
        L{plan_t} object. It may have inputs and outputs. Things may
        have no input or output files. Return the created
        L{command_t}. The command object dependence tracking mechanism
        will control their execution.

        @type env: L{env_t}
        @param env: the environment
        @type d: dict or L{plan_t}
        @param d: a dictionary or L{plan_t}
        from a builder describing the command.
        @rtype: L{command_t}
        @return: A command object for the dependence DAG
        """
        if verbose(12):
            msgb("DAG ADDING", str(d))
        if isinstance(d, types.DictType):
            q = self._convert_to_dagfood(d)
            c = self._add_dagfood(env, q)
        elif isinstance(d, plan_t):
            c = self._add_dagfood(env, d)
        else:
            die("Unhandled type: " + str(type(d)))
        if verbose(12):
            msgb("DAG ADDING", 'DONE')

        return c

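    # Illustrative sketch of how add() is typically called (hypothetical
    # file names; env must be an env_t supplying the keys read below,
    # e.g. CPPPATH and gen_dir). The same build step can be expressed as
    # a plan_t or as a dict:
    #
    #     p = plan_t('cc -c foo.c -o foo.o')
    #     p.input = 'foo.c'
    #     p.output = 'foo.o'
    #     cmd1 = dag.add(env, p)
    #
    #     cmd2 = dag.add(env, {'command': 'cc foo.o -o foo',
    #                          'input':   'foo.o',
    #                          'output':  'foo'})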

    def _canonize_one_fn(self, fn):
        nfn = strip_quotes(fn)
        r = os.path.realpath(nfn)
        if verbose(12):
            msgb("REALPATH", "%s -> %s" % (nfn, r), pad=' ')
        return r

    def _canonize_fn(self, x):
        x = self._make_list(x)
        n = []
        for fn in x:
            r = self._canonize_one_fn(fn)
            n.append(r)
        return n

    def _canonize_if_exists_fn(self, x):
        x = self._make_list(x)
        n = []
        for fn in x:
            if os.path.exists(fn):
                r = self._canonize_one_fn(fn)
                n.append(r)
            else:
                n.append(fn)
        return n

    def _add_dagfood(self, env, d):
        if d.input:
            d.input = env.expand_string(d.input)
        if d.output:
            d.output = env.expand_string(d.output)

        c = self._make_command_object(d)

        if verbose(12):
            msgb("CANONIZE INPUTS", pad=' ')
        c.inputs = self._canonize_fn(c.inputs)
        if verbose(12):
            msgb("CANONIZE TARGETS", pad=' ')
        c.targets = self._canonize_fn(c.targets)

        for s in c.inputs:
            if verbose(10):
                msgb("ADD-INPUT", s, pad=' ')
            self._add_one_input(s, c)

        for t in c.targets:
            if verbose(10):
                msgb("ADD-OUTPUT", t, pad=' ')
            self._add_one_output(t, c)

        header_paths = env['CPPPATH']
        for s in c.inputs:
            self._scan_headers(s, header_paths, env['gen_dir'])
        return c

1113 """Convert a dictionary to a plan_t"""
1114 q = plan_t(d['command'])
1115 try:
1116 q.args = d['args']
1117 except:
1118 pass
1119 try:
1120 q.input = d['input']
1121 except:
1122 pass
1123 try:
1124 q.output = d['output']
1125 except:
1126 pass
1127 try:
1128 q.env = d['env']
1129 except:
1130 pass
1131 return q
1132
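
# ----------------------------------------------------------------------
# Illustrative sketch (hypothetical names and files): building a small
# two-step DAG. Only APIs visible in this module are exercised; the env
# argument must be an env_t carrying at least CPPPATH and gen_dir, and
# actually running the resulting commands is left to work_queue.py,
# which queries the DAG for ready work.

def _example_two_step_dag(env):
    dag = dag_t('example', env=env)

    compile_step = plan_t('cc -c foo.c -o foo.o')
    compile_step.input = 'foo.c'
    compile_step.output = 'foo.o'
    dag.add(env, compile_step)

    link_step = {'command': 'cc foo.o -o foo',
                 'input':   'foo.o',
                 'output':  'foo'}
    dag.add(env, link_step)
    return dag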