1 # Copyright 2009, Peter A. Bigot
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain a
5 # copy of the License at:
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
13 # under the License.
14
15 """Classes and global objects related to archiving U{XML
16 Namespaces<http://www.w3.org/TR/2006/REC-xml-names-20060816/index.html>}."""
17
18 import pyxb
19 import os
20 import fnmatch
21 import pyxb.utils.utility
22 import utility
23
24 PathEnvironmentVariable = 'PYXB_ARCHIVE_PATH'
25 """Environment variable from which default path to pre-loaded namespaces is
26 read. The value should be a colon-separated list of absolute paths. The
27 character C{&} at the start of a member of the list is replaced by the path to
28 the directory where the C{pyxb} modules are found, including a trailing C{/}.
29 For example, use C{&pyxb/bundles//} to enable search of any archive bundled
30 with PyXB.
31
32 @note: If you put a path separator between C{&} and the following path, this
33 will cause the substitution to be ignored."""
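# Illustrative sketch (not part of the original module): configuring the
# archive path before bindings are imported.  The directory
# '/opt/schemas/archives' is hypothetical; the C{&} prefix expands to the
# directory holding the pyxb modules (see DefaultArchivePrefix below).
#
#   import os
#   os.environ['PYXB_ARCHIVE_PATH'] = '/opt/schemas/archives:&pyxb/bundles//'
#   import pyxb.namespace.archive
#   print pyxb.namespace.archive.GetArchivePath()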
34
35 import os.path
36 import stat
37
38 DefaultArchivePrefix = os.path.realpath(os.path.join(os.path.dirname( __file__), '../..'))
39 """The default archive prefix, substituted for C{&} in C{PYXB_ARCHIVE_PATH}."""
42 """Return the archive path as defined by the L{PathEnvironmentVariable},
43 or C{None} if that variable is not defined."""
44 import os
45 return os.environ.get(PathEnvironmentVariable)
46
47 # Stuff required for pickling
48 import cPickle as pickle
49 #import pyxb.utils.pickle_trace as pickle
50
51 import re
54 """Represent a file from which one or more namespaces can be read, or to
55 which they will be written."""
56
57 # A code used to identify the format of the archive, so we don't
58 # mis-interpret its contents.
59 # YYYYMMDDHHMM
60 __PickleFormat = '200907190858'
61
62 @classmethod
64 """The category name to use when storing references to anonymous type
65 definitions. For example, attribute definitions defined within an
66 attribute use in a model group definition.that can be referenced frojm
67 ax different namespace."""
68 return cls.__AnonymousCategory
69 __AnonymousCategory = '_anonymousTypeDefinition'
70
71 @classmethod
73 """Return a reference to a set specifying the namespace instances that
74 are being archived.
75
76 This is needed to determine whether a component must be serialized as
77 aa reference."""
78 # NB: Use root class explicitly. If we use cls, when this is invoked
79 # by subclasses it gets mangled using the subclass name so the one
80 # defined in this class is not found
81 return NamespaceArchive.__PicklingArchive
82 # Class variable recording the namespace that is currently being
83 # pickled. Used to prevent storing components that belong to
84 # other namespaces. Should be None unless within an invocation of
85 # SaveToFile.
86 __PicklingArchive = None
87
88 __NamespaceArchives = None
89 """A mapping from generation UID to NamespaceArchive instances."""
90
92 """Remove this archive from the set of available archives.
93
94 This is invoked when an archive contains a namespace that the user has
95 specified should not be loaded."""
96 del self.__NamespaceArchives[self.generationUID()]
97 for ns in self.__namespaces:
98 ns._removeArchive(self)
99
100 @classmethod
102 """Return a L{NamespaceArchive} instance associated with the given file.
103
104 To the extent possible, the same file accessed through different paths
105 returns the same L{NamespaceArchive} instance.
106 """
107
108 nsa = NamespaceArchive(archive_path=archive_file, stage=cls._STAGE_uid)
109 rv = cls.__NamespaceArchives.get(nsa.generationUID(), nsa)
110 if rv == nsa:
111 cls.__NamespaceArchives[rv.generationUID()] = rv
112 rv._readToStage(stage)
113 return rv
114
115 __ArchivePattern_re = re.compile('\.wxs$')
116
117 @classmethod
119 """Scan for available archives, associating them with namespaces.
120
121 This only validates potential archive contents; it does not load
122 namespace data from the archives. If invoked with no arguments,
123
124 @keyword archive_path: A colon-separated list of files or directories in
125 which namespace archives can be found; see L{PathEnvironmentVariable}.
126 Defaults to L{GetArchivePath()}. If not defaulted, C{reset} will be
127 forced to C{True}. For any directory in the path, all files ending with
128 C{.wxs} are examined.
129
130 @keyword required_archive_files: A list of paths to files that must
131 resolve to valid namespace archives.
132
133 @keyword reset: If C{False} (default), the most recently read set of
134 archives is returned; if C{True}, the archive path is re-scanned and the
135 namespace associations validated.
136
137 @return: A list of L{NamespaceArchive} instances corresponding to the
138 members of C{required_archive_files}, in order. If
139 C{required_archive_files} was not provided, returns an empty list.
140
141 @raise pickle.UnpicklingError: a C{required_archive_files} member does not
142 contain a valid namespace archive.
143 """
144
145 import builtin
146
147 reset = reset or (archive_path is not None) or (required_archive_files is not None) or (cls.__NamespaceArchives is None)
148 required_archives = []
149 if reset:
150 # Get a list of pre-existing archives, initializing the map if
151 # this is the first time through.
152 if cls.__NamespaceArchives is None:
153 cls.__NamespaceArchives = { }
154 existing_archives = set(cls.__NamespaceArchives.values())
155 archive_set = set(required_archives)
156
157 # Get archives for all required files
158 if required_archive_files is not None:
159 for afn in required_archive_files:
160 required_archives.append(cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules))
161
162 # Ensure we have an archive path. If not, don't do anything.
163 if archive_path is None:
164 archive_path = GetArchivePath()
165 if archive_path is not None:
166
167 # Get archive instances for everything in the archive path
168 candidate_files = pyxb.utils.utility.GetMatchingFiles(archive_path, cls.__ArchivePattern_re,
169 default_path_wildcard='+', default_path=GetArchivePath(),
170 prefix_pattern='&', prefix_substituend=DefaultArchivePrefix)
171 for afn in candidate_files:
172 #print 'Considering %s' % (afn,)
173 try:
174 nsa = cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules)
175 archive_set.add(nsa)
176 except pickle.UnpicklingError, e:
177 print 'Cannot use archive %s: %s' % (afn, e)
178 except pyxb.NamespaceArchiveError, e:
179 print 'Cannot use archive %s: %s' % (afn, e)
180
181 # Do this for two reasons: first, to get an iterable that won't
182 # cause problems when we remove unresolvable archives from
183 # archive_set; and second to aid with forced dependency inversion
184 # testing
185 ordered_archives = sorted(list(archive_set), lambda _a,_b: cmp(_a.archivePath(), _b.archivePath()))
186 ordered_archives.reverse()
187
188 # Create a graph that identifies dependencies between the archives
189 archive_map = { }
190 for a in archive_set:
191 archive_map[a.generationUID()] = a
192 archive_graph = pyxb.utils.utility.Graph()
193 for a in ordered_archives:
194 prereqs = a._unsatisfiedModulePrerequisites()
195 if 0 < len(prereqs):
196 for p in prereqs:
197 if builtin.BuiltInObjectUID == p:
198 continue
199 da = archive_map.get(p)
200 if da is None:
201 print 'WARNING: %s depends on unavailable archive %s' % (a, p)
202 archive_set.remove(a)
203 else:
204 #print '%s depends on %s' % (a, da)
205 archive_graph.addEdge(a, da)
206 else:
207 #print '%s has no dependencies' % (a,)
208 archive_graph.addRoot(a)
209
210 # Verify that there are no dependency loops.
211 archive_scc = archive_graph.sccOrder()
212 for scc in archive_scc:
213 if 1 < len(scc):
214 raise pyxb.LogicError("Cycle in archive dependencies. How'd you do that?\n " + "\n ".join([ _a.archivePath() for _a in scc ]))
215 archive = scc[0]
216 if not (archive in archive_set):
217 #print 'Discarding unresolvable %s' % (archive,)
218 archive.discard()
219 existing_archives.remove(archive)
220 continue
221 #print 'Completing load of %s' % (archive,)
222 #archive._readToStage(cls._STAGE_COMPLETE)
223
224 # Discard any archives that we used to know about but now aren't
225 # supposed to. @todo make this friendlier in the case of archives
226 # we've already incorporated.
227 for archive in existing_archives.difference(archive_set):
228 print 'Discarding excluded archive %s' % (archive,)
229 archive.discard()
230
231 return required_archives
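# Illustrative sketch (not part of the original module): one way a caller
# might force a re-scan of the archive path and require a specific archive
# file.  The path 'bindings/raw/common.wxs' is hypothetical.
#
#   from pyxb.namespace.archive import NamespaceArchive
#   archives = NamespaceArchive.PreLoadArchives(
#       archive_path='/opt/schemas/archives',
#       required_archive_files=['bindings/raw/common.wxs'])
#   # 'archives' holds one NamespaceArchive per required file, in order.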
232
236 __archivePath = None
237
239 """The unique identifier for the generation that produced this archive."""
240 return self.__generationUID
241 __generationUID = None
242
244 """Return C{True} iff it is permissible to load the archive.
245 Archives created for output cannot be loaded."""
246 return self.__isLoadable
247 __isLoadable = None
248
249 def __locateModuleRecords (self):
250 self.__moduleRecords = set()
251 namespaces = set()
252 for ns in pyxb.namespace.utility.AvailableNamespaces():
253 # @todo allow these; right now it's usually the XML
254 # namespace and we're not prepared to reconcile
255 # redefinitions of those components.
256 if ns.isUndeclaredNamespace():
257 continue
258 mr = ns.lookupModuleRecordByUID(self.generationUID())
259 if mr is not None:
260 namespaces.add(ns)
261 mr.prepareForArchive(self)
262 self.__moduleRecords.add(mr)
263 self.__namespaces.update(namespaces)
265 """Return the set of L{module records <ModuleRecord>} stored in this
266 archive.
267
268 Each module record represents"""
269 return self.__moduleRecords
270 __moduleRecords = None
271
272 @classmethod
274 """Return the L{NamespaceArchive} instance that can be found at the
275 given path."""
276 return cls.__GetArchiveInstance(archive_file)
277
278 # States in the finite automaton that is used to read archive contents.
279 _STAGE_UNOPENED = 0 # Haven't even checked for existence
280 _STAGE_uid = 1 # Verified archive exists, obtained generation UID from it
281 _STAGE_readModules = 2 # Read module records from archive, which includes UID dependences
282 _STAGE_validateModules = 3 # Verified pre-requisites for module loading
283 _STAGE_readComponents = 4 # Extracted components from archive and integrated into namespaces
284 _STAGE_COMPLETE = _STAGE_readComponents
285
287 return self.__stage
288 __stage = None
289
291 """Create a new namespace archive.
292
293 If C{namespaces} is given, this is an output archive.
294
295 If C{namespaces} is absent, this is an input archive.
296
297 @raise IOError: error attempting to read the archive file
298 @raise pickle.UnpicklingError: something is wrong with the format of the library
299 """
300 self.__namespaces = set()
301 if generation_uid is not None:
302 if archive_path:
303 raise pyxb.LogicError('NamespaceArchive: cannot define both namespaces and archive_path')
304 self.__generationUID = generation_uid
305 self.__locateModuleRecords()
306 elif archive_path is not None:
307 if generation_uid is not None:
308 raise pyxb.LogicError('NamespaceArchive: cannot provide generation_uid with archive_path')
309 self.__archivePath = archive_path
310 self.__stage = self._STAGE_UNOPENED
311 self.__isLoadable = loadable
312 if self.__isLoadable:
313 if stage is None:
314 stage = self._STAGE_readModules
315 self._readToStage(stage)
316 else:
317 pass
318
320 """Add the given namespace to the set that is to be stored in this archive."""
321 if namespace.isAbsentNamespace():
322 raise pyxb.NamespaceArchiveError('Cannot archive absent namespace')
323 self.__namespaces.add(namespace)
324
326 """Add the given namespaces to the set that is to be stored in this archive."""
327 [ self.add(_ns) for _ns in namespace_set ]
328
332 __namespaces = None
333
334 def __createPickler (self, output):
335 # @todo: support StringIO instances?
336 if not isinstance(output, file):
337 output = open(output, 'wb')
338 pickler = pickle.Pickler(output, -1)
339
340 # The format of the archive
341 pickler.dump(NamespaceArchive.__PickleFormat)
342
343 # The UID for the set
344 assert self.generationUID() is not None
345 pickler.dump(self.generationUID())
346
347 return pickler
348
349 def __createUnpickler (self):
350 unpickler = pickle.Unpickler(open(self.__archivePath, 'rb'))
351
352 format = unpickler.load()
353 if self.__PickleFormat != format:
354 raise pyxb.NamespaceArchiveError('Archive format is %s, require %s' % (format, self.__PickleFormat))
355
356 self.__generationUID = unpickler.load()
357
358 return unpickler
359
360 def __readModules (self, unpickler):
361 #print 'RM %x %s' % (id(self), self)
362 mrs = unpickler.load()
363 assert isinstance(mrs, set), 'Expected set got %s from %s' % (type(mrs), self.archivePath())
364 if self.__moduleRecords is None:
365 for mr in mrs.copy():
366 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
367 if mr2 is not None:
368 mr2._setFromOther(mr, self)
369 #print 'Replaced locally defined %s with archive data' % (mr2,)
370 mrs.remove(mr)
371 self.__moduleRecords = set()
372 assert 0 == len(self.__namespaces)
373 for mr in mrs:
374 mr._setArchive(self)
375 ns = mr.namespace()
376 ns.addModuleRecord(mr)
377 self.__namespaces.add(ns)
378 self.__moduleRecords.add(mr)
379 else:
380 # Verify the archive still has what was in it when we created this.
381 for mr in mrs:
382 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
383 if not (mr2 in self.__moduleRecords):
384 raise pyxb.NamespaceArchiveError('Lost module record %s %s from %s' % (mr.namespace(), mr.generationUID(), self.archivePath()))
385
386 def _unsatisfiedModulePrerequisites (self):
387 prereq_uids = set()
388 for mr in self.__moduleRecords:
389 ns = mr.namespace()
390 #print 'Namespace %s records:' % (ns,)
391 #for xmr in ns.moduleRecords():
392 # print ' %s' % (xmr,)
393 prereq_uids.update(mr.dependsOnExternal())
394 return prereq_uids
395
396 def __validatePrerequisites (self, stage):
397 import builtin
398 prereq_uids = self._unsatisfiedModulePrerequisites()
399 #print '%s depends on %d prerequisites' % (self, len(prereq_uids))
400 for uid in prereq_uids:
401 if builtin.BuiltInObjectUID == uid:
402 continue
403 depends_on = self.__NamespaceArchives.get(uid)
404 if depends_on is None:
405 raise pyxb.NamespaceArchiveError('%s: archive depends on unavailable archive %s' % (self.archivePath(), uid))
406 #print '%s stage %s depends on %s at %s going to %s' % (self, self._stage(), depends_on, depends_on._stage(), stage)
407 depends_on._readToStage(stage)
408
409 def __validateModules (self):
410 self.__validatePrerequisites(self._STAGE_validateModules)
411 for mr in self.__moduleRecords:
412 ns = mr.namespace()
413 #print 'Namespace %s records:' % (ns,)
414 #for xmr in ns.moduleRecords():
415 # print ' %s' % (xmr,)
416 for base_uid in mr.dependsOnExternal():
417 xmr = ns.lookupModuleRecordByUID(base_uid)
418 if xmr is None:
419 raise pyxb.NamespaceArchiveError('Module %s depends on external module %s, not available in archive path' % (mr.generationUID(), base_uid))
420 if not xmr.isIncorporated():
421 print 'Need to incorporate data from %s' % (xmr,)
422 else:
423 print 'Have required base data %s' % (xmr,)
424
425 for origin in mr.origins():
426 #print 'mr %s origin %s' % (mr, origin)
427 for (cat, names) in origin.categoryMembers().iteritems():
428 if not (cat in ns.categories()):
429 continue
430 cross_objects = names.intersection(ns.categoryMap(cat).keys())
431 if 0 < len(cross_objects):
432 raise pyxb.NamespaceArchiveError('Archive %s namespace %s module %s origin %s archive/active conflict on category %s: %s' % (self.__archivePath, ns, mr, origin, cat, " ".join(cross_objects)))
433 print '%s no conflicts on %d names' % (cat, len(names))
434
435 def __readComponentSet (self, unpickler):
436 self.__validatePrerequisites(self._STAGE_readComponents)
437 print 'RCS %s' % (self,)
438 for n in range(len(self.__moduleRecords)):
439 ns = unpickler.load()
440 mr = ns.lookupModuleRecordByUID(self.generationUID())
441 assert mr in self.__moduleRecords
442 assert not mr.isIncorporated()
443 objects = unpickler.load()
444 mr._loadCategoryObjects(objects)
445
446 __unpickler = None
447 def _readToStage (self, stage):
448 if self.__stage is None:
449 raise pyxb.NamespaceArchiveError('Attempt to read from invalid archive %s' % (self,))
450 try:
451 while self.__stage < stage:
452 #print 'RTS %s want %s' % (self.__stage, stage)
453 if self.__stage < self._STAGE_uid:
454 self.__unpickler = self.__createUnpickler()
455 self.__stage = self._STAGE_uid
456 continue
457 if self.__stage < self._STAGE_readModules:
458 assert self.__unpickler is not None
459 self.__readModules(self.__unpickler)
460 self.__stage = self._STAGE_readModules
461 continue
462 if self.__stage < self._STAGE_validateModules:
463 self.__validateModules()
464 self.__stage = self._STAGE_validateModules
465 continue
466 if self.__stage < self._STAGE_readComponents:
467 assert self.__unpickler is not None
468 self.__stage = self._STAGE_readComponents
469 self.__readComponentSet(self.__unpickler)
470 self.__unpickler = None
471 continue
472 raise pyxb.LogicError('Too many stages (at %s, want %s)' % (self.__stage, stage))
473 except:
474 self.__stage = None
475 self.__unpickler = None
476 raise
477
479 """Read all the components from this archive, integrating them into
480 their respective namespaces."""
481 self._readToStage(self._STAGE_COMPLETE)
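# Illustrative sketch (not part of the original module): reading every
# namespace stored in a single archive file.  'common.wxs' is a hypothetical
# file name.
#
#   nsa = NamespaceArchive(archive_path='common.wxs')
#   nsa.readNamespaces()       # integrates all stored components
#   for mr in nsa.moduleRecords():
#       print 'loaded %s for %s' % (mr.generationUID(), mr.namespace())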
482
484 """Store the namespaces into the archive.
485
486 @param output: An instance substitutable for a writable file, or the
487 name of a file to write to.
488 """
489 import sys
490
491 assert NamespaceArchive.__PicklingArchive is None
492 NamespaceArchive.__PicklingArchive = self
493 assert self.__moduleRecords is not None
494
495 # Recalculate the record/object associations: we didn't assign
496 # anonymous names to the indeterminate scope objects because they
497 # weren't needed for bindings, but they are needed in the archive.
498 for mr in self.__moduleRecords:
499 mr.namespace()._associateOrigins(mr)
500
501 try:
502 # See http://bugs.python.org/issue3338
503 recursion_limit = sys.getrecursionlimit()
504 sys.setrecursionlimit(10 * recursion_limit)
505
506 pickler = self.__createPickler(output)
507
508 assert isinstance(self.__moduleRecords, set)
509 print "\n".join([ str(_mr) for _mr in self.__moduleRecords ])
510 pickler.dump(self.__moduleRecords)
511
512 for mr in self.__moduleRecords:
513 pickler.dump(mr.namespace())
514 pickler.dump(mr.categoryObjects())
515 finally:
516 sys.setrecursionlimit(recursion_limit)
517 NamespaceArchive.__PicklingArchive = None
518
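# Illustrative sketch (not part of the original module): writing an archive
# for namespaces produced by a binding generation.  'generation_uid' and
# 'processed_namespaces' are hypothetical values normally supplied by the
# binding generator.
#
#   nsa = NamespaceArchive(generation_uid=generation_uid)
#   nsa.update(processed_namespaces)
#   nsa.writeNamespaces('bindings.wxs')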
519 def __str__ (self):
520 archive_path = self.__archivePath
521 if archive_path is None:
522 archive_path = '??'
523 return 'NSArchive@%s' % (archive_path,)
524
526 """Mix-in to any object that can be stored in a namespace within an archive."""
527
528 # Need to set this per category item
529 __objectOrigin = None
531 return self.__objectOrigin
533 if (self.__objectOrigin is not None) and (not override):
534 if self.__objectOrigin != object_origin:
535 raise pyxb.LogicError('Inconsistent origins for object %s: %s %s' % (self, self.__objectOrigin, object_origin))
536 else:
537 self.__objectOrigin = object_origin
538
540 #assert self.__objectOrigin is not None
541 if self._objectOrigin() is not None:
542 return getattr(super(_ArchivableObject_mixin, self), '_prepareForArchive_csc', lambda *_args,**_kw: self)(self._objectOrigin().moduleRecord())
543 assert not isinstance(self, pyxb.xmlschema.structures._NamedComponent_mixin)
544
546 return getattr(super(_ArchivableObject_mixin, self), '_updateFromOther_csc', lambda *_args,**_kw: self)(other)
547
549 """Update this instance with additional information provided by the other instance.
550
551 This is used, for example, when a built-in type is already registered
552 in the namespace, but we've processed the corresponding schema and
553 have obtained more details."""
554 assert self != other
555 return self._updateFromOther_csc(other)
556
558 import builtin
559 assert self._objectOrigin()
560 return builtin.BuiltInObjectUID == self._objectOrigin().generationUID()
561
563 """Encapsulate the operations and data relevant to archiving namespaces.
564
565 This class mixes-in to L{pyxb.namespace.Namespace}"""
566
568 """CSC extension to reset fields of a Namespace.
569
570 This one handles category-related data."""
571 getattr(super(_NamespaceArchivable_mixin, self), '_reset', lambda *args, **kw: None)()
572 self.__loadedFromArchive = None
573 self.__wroteToArchive = None
574 self.__active = False
575 self.__moduleRecordMap = {}
576
577 def _loadedFromArchive (self):
578 return self.__loadedFromArchive
579
580 __wroteToArchive = None
581 __loadedFromArchive = None
582
584 if self.__isActive and empty_inactive:
585 for (ct, cm) in self._categoryMap().items():
586 if 0 < len(cm):
587 print '%s: %d %s -- activated' % (self, len(cm), ct)
588 return True
589 return False
590 return self.__isActive
591
596 __isActive = None
597
600
606
607 def _removeArchive (self, archive):
608 # Yes, I do want this to raise KeyError if the archive is not present
609 mr = self.__moduleRecordMap[archive.generationUID()]
610 assert not mr.isIncorporated(), 'Removing archive %s after incorporation' % (archive.archivePath(),)
611 # print 'removing %s' % (mr,)
612 del self.__moduleRecordMap[archive.generationUID()]
613
615 """Return C{True} iff the component model for this namespace can be
616 loaded from a namespace archive."""
617 for mr in self.moduleRecords():
618 if mr.isLoadable():
619 return True
620 return False
621
623 """Return the list of archives from which components for this
624 namespace can be loaded."""
625 rv = []
626 for mr in self.moduleRecords():
627 if mr.isLoadable():
628 rv.append(mr.archive())
629 return rv
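# Illustrative sketch (not part of the original module): checking whether a
# namespace can be satisfied from archives before parsing its schema.  The
# URI is hypothetical, and the accessor names follow the methods above.
#
#   import pyxb.namespace
#   ns = pyxb.namespace.NamespaceForURI('urn:example:po', create_if_missing=True)
#   if ns.isLoadable():
#       print 'can load %s from %s' % (ns, ns.archives())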
630
633 __moduleRecordMap = None
634
635 def addModuleRecord (self, module_record):
636 assert isinstance(module_record, ModuleRecord)
637 # This assertion will fail if the binding is loaded before its archive
638 # is scanned.
639 assert not (module_record.generationUID() in self.__moduleRecordMap)
640 self.__moduleRecordMap[module_record.generationUID()] = module_record
641 return module_record
642 def lookupModuleRecordByUID (self, generation_uid, create_if_missing=False, *args, **kw):
643 rv = self.__moduleRecordMap.get(generation_uid)
644 if (rv is None) and create_if_missing:
645 rv = self.addModuleRecord(ModuleRecord(self, generation_uid, *args, **kw))
646 return rv
647
649 #assert not self.__isActive, 'ERROR: State set for active namespace %s' % (self,)
650 return getattr(super(_NamespaceArchivable_mixin, self), '_getState_csc', lambda _kw: _kw)(kw)
651
653 """Prevent loading this namespace from an archive.
654
655 This marks all archives in which the namespace appears, whether
656 publicly or privately, as not loadable."""
657 if self._loadedFromArchive():
658 raise pyxb.NamespaceError(self, 'cannot mark not loadable when already loaded')
659 for mr in self.moduleRecords():
660 mr._setIsLoadable(False)
661
663 __PrivateTransient = set()
664
666 return self.__namespace
667 __namespace = None
668
670 return self.__archive
674 __archive = None
675 __PrivateTransient.add('archive')
676
678 return self.__isPublic
682 __isPublic = None
683
687 assert self.__isLoadable
688 self.__isIncorporated = True
689 self.__isLoadable = False
690 return self
691 __isIncorporated = None
692 __PrivateTransient.add('isIncorporated')
693
699 __isLoadable = None
700
702 return self.__generationUID
703 __generationUID = None
704
708 assert isinstance(origin, _ObjectOrigin)
709 assert not (origin.signature() in self.__originMap)
710 self.__originMap[origin.signature()] = origin
711 return origin
715 if self.__originMap is None:
716 self.__originMap = {}
717 else:
718 self.__originMap.clear()
719 [ self.addOrigin(_o) for _o in origins ]
720 return self
721 __originMap = None
722
728
730 return self.__modulePath
732 assert (module_path is None) or isinstance(module_path, basestring)
733 self.__modulePath = module_path
734 return self
735 __modulePath = None
736
738 return self.__module
740 self.__module = module
741 # This is a nice idea, but screws up the unit tests that already have
742 # ns1 and the like logged as expected prefixes. Only causes a problem
743 # when the tests are run individually; dunno why.
744 #ns = self.namespace()
745 #if (ns.prefix() is None) and (module is not None):
746 # try:
747 # ns.setPrefix(os.path.basename(os.path.normpath(module.__file__)).split('.')[0])
748 # except AttributeError:
749 # pass
750 return self
751 __module = None
752 __PrivateTransient.add('module')
753
755 return self.__referencedNamespaces
762 __referencedNamespaces = None
763
764 __constructedLocally = False
765 __PrivateTransient.add('constructedLocally')
766
768 import builtin
769
770 super(ModuleRecord, self).__init__()
771 self.__namespace = namespace
772 #print 'Created MR for %s gen %s' % (namespace, generation_uid)
773 assert (generation_uid != builtin.BuiltInObjectUID) or namespace.isBuiltinNamespace()
774 self.__isPublic = kw.get('is_public', False)
775 self.__isIncorporated = kw.get('is_incorporated', False)
776 self.__isLoadable = kw.get('is_loadable', True)
777 assert isinstance(generation_uid, pyxb.utils.utility.UniqueIdentifier)
778 self.__generationUID = generation_uid
779 self.__modulePath = kw.get('module_path')
780 self.__module = kw.get('module')
781 self.__originMap = {}
782 self.__referencedNamespaces = set()
783 self.__categoryObjects = { }
784 self.__constructedLocally = True
785 self.__dependsOnExternal = set()
786
788 if (not self.__constructedLocally) or other.__constructedLocally:
789 raise pyxb.ImplementationError('Module record update requires local to be updated from archive')
790 assert self.__generationUID == other.__generationUID
791 assert self.__archive is None
792 self.__isPublic = other.__isPublic
793 assert not self.__isIncorporated
794 self.__isLoadable = other.__isLoadable
795 self.__modulePath = other.__modulePath
796 # self.__module already set correctly
797 self.__originMap.update(other.__originMap)
798 self.__referencedNamespaces.update(other.__referencedNamespaces)
799 if not (other.__categoryObjects is None):
800 self.__categoryObjects.update(other.__categoryObjects)
801 self.__dependsOnExternal.update(other.__dependsOnExternal)
802 self._setArchive(archive)
803
805 return self.__categoryObjects
807 self.__categoryObjects.clear()
808 for origin in self.origins():
809 origin.resetCategoryMembers()
814 assert self.__categoryObjects is None
815 assert not self.__constructedLocally
816 ns = self.namespace()
817 ns.configureCategories(category_objects.keys())
818 for (cat, obj_map) in category_objects.iteritems():
819 current_map = ns.categoryMap(cat)
820 for (local_name, component) in obj_map.iteritems():
821 existing_component = current_map.get(local_name)
822 if existing_component is None:
823 current_map[local_name] = component
824 elif existing_component._allowUpdateFromOther(component):
825 existing_component._updateFromOther(component)
826 else:
827 raise pyxb.NamespaceError(self, 'Load attempted to override %s %s in %s' % (cat, local_name, self.namespace()))
828 self.markIncorporated()
829 __categoryObjects = None
830 __PrivateTransient.add('categoryObjects')
831
833 return self.__dependsOnExternal
834 __dependsOnExternal = None
835
837 assert self.archive() is None
838 self._setArchive(archive)
839 ns = self.namespace()
840 self.__dependsOnExternal.clear()
841 for mr in ns.moduleRecords():
842 if mr != self:
843 print 'This gen depends on %s' % (mr,)
844 self.__dependsOnExternal.add(mr.generationUID())
845 for obj in ns._namedObjects().union(ns.components()):
846 if isinstance(obj, _ArchivableObject_mixin):
847 if obj._objectOrigin():
848 obj._prepareForArchive(self)
849 #print 'Archive %s ns %s module %s has %d origins' % (self.archive(), self.namespace(), self, len(self.origins()))
850
852 self.namespace()._transferReferencedNamespaces(self)
853 self.namespace()._associateOrigins(self)
854
857
859 """Marker class for objects that can serve as an origin for an object in a
860 namespace."""
861 __PrivateTransient = set()
862
864 return self.__signature
865 __signature = None
866
868 return self.__moduleRecord
869 __moduleRecord = None
870
873
876
878 self.__signature = kw.pop('signature', None)
879 super(_ObjectOrigin, self).__init__(**kw)
880 self.__moduleRecord = namespace.lookupModuleRecordByUID(generation_uid, create_if_missing=True, **kw)
881 self.__moduleRecord.addOrigin(self)
882 self.__categoryMembers = { }
883 self.__categoryObjectMap = { }
884
886 self.__categoryMembers.clear()
887 self.__categoryObjectMap.clear()
888 self.__originatedObjects = None
890 self.__categoryMembers.setdefault(category, set()).add(name)
891 self.__categoryObjectMap.setdefault(category, {})[name] = obj
892 self.__moduleRecord._addCategoryObject(category, name, obj)
894 return self.__categoryMembers
896 if self.__originatedObjects is None:
897 components = set()
898 [ components.update(_v.values()) for _v in self.__categoryObjectMap.itervalues() ]
899 self.__originatedObjects = frozenset(components)
900 return self.__originatedObjects
901
902 # The set of category names associated with objects. Don't throw this
903 # away and use categoryObjectMap.keys() instead: that's transient, and we
904 # need this to have a value when read from an archive.
905 __categoryMembers = None
906
907 # Map from category name to a map from an object name to the object
908 __categoryObjectMap = None
909 __PrivateTransient.add('categoryObjectMap')
910
911 # The set of objects that originated at this origin
912 __originatedObjects = None
913 __PrivateTransient.add('originatedObjects')
914
916 """Holds the data regarding components derived from a single schema.
917
918 Coupled to a particular namespace through the
919 L{_NamespaceComponentAssociation_mixin}.
920 """
921
922 __PrivateTransient = set()
923
925 schema = kw.get('schema')
926 if schema is not None:
927 assert not ('location' in kw)
928 kw['location'] = schema.location()
929 assert not ('signature' in kw)
930 kw['signature'] = schema.signature()
931 assert not ('generation_uid' in kw)
932 kw['generation_uid'] = schema.generationUID()
933 assert not ('namespace' in kw)
934 kw['namespace'] = schema.targetNamespace()
935 assert not ('version' in kw)
936 kw['version'] = schema.schemaAttribute('version')
937
939 """Determine whether this record matches the parameters.
940
941 @keyword schema: a L{pyxb.xmlschema.structures.Schema} instance from
942 which the other parameters are obtained.
943 @keyword location: a schema location (URI)
944 @keyword signature: a schema signature
945 @return: C{True} iff I{either} C{location} or C{signature} matches."""
946 self.__setDefaultKW(kw)
947 location = kw.get('location')
948 if (location is not None) and (self.location() == location):
949 return True
950 signature = kw.get('signature')
951 if (signature is not None) and (self.signature() == signature):
952 return True
953 return False
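# Illustrative sketch (not part of the original module): using the match
# predicate documented above to recognize a schema whose components were
# already archived.  The location URI is hypothetical, and the method name
# follows the docstring above rather than a verified signature.
#
#   if origin.match(location='http://example.com/po.xsd'):
#       print 'components for this schema location are already archived'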
954
956 return self.__location
957 __location = None
958
960 return self.__schema
961 __schema = None
962 __PrivateTransient.add('schema')
963
965 return self.__version
966 __version = None
967
969 self.__setDefaultKW(kw)
970 self.__schema = kw.pop('schema', None)
971 self.__location = kw.pop('location', None)
972 self.__version = kw.pop('version', None)
973 super(_SchemaOrigin, self).__init__(kw.pop('namespace'), kw.pop('generation_uid'), **kw)
974
981
983 """Return a list of namespace URIs for which we may be able to load the
984 namespace contents from a pre-parsed file. The corresponding L{Namespace}
985 can be retrieved using L{NamespaceForURI}, and the declared objects in
986 that namespace loaded with L{Namespace.validateComponentModel}.
987
988 Note that success of the load is not guaranteed if the packed file
989 is not compatible with the schema class being used."""
990 # Invoke this to ensure we have searched for loadable namespaces
991 return _LoadableNamespaceMap().keys()
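# Illustrative sketch (not part of the original module): listing namespace
# URIs whose contents appear to be loadable from archives on the search path.
#
#   NamespaceArchive.PreLoadArchives()
#   for uri in AvailableForLoad():
#       print 'archived namespace: %s' % (uri,)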
992
993 class NamespaceDependencies (object):
994
995 def rootNamespaces (self):
996 return self.__rootNamespaces
997 __rootNamespaces = None
998
999 def namespaceGraph (self, reset=False):
1000 if reset or (self.__namespaceGraph is None):
1001 self.__namespaceGraph = pyxb.utils.utility.Graph()
1002 map(self.__namespaceGraph.addRoot, self.rootNamespaces())
1003
1004 # Make sure all referenced namespaces have valid components
1005 need_check = self.__rootNamespaces.copy()
1006 done_check = set()
1007 while 0 < len(need_check):
1008 ns = need_check.pop()
1009 ns.validateComponentModel()
1010 self.__namespaceGraph.addNode(ns)
1011 for rns in ns.referencedNamespaces().union(ns.importedNamespaces()):
1012 self.__namespaceGraph.addEdge(ns, rns)
1013 if not rns in done_check:
1014 need_check.add(rns)
1015 if not ns.hasSchemaComponents():
1016 print 'WARNING: Referenced %s has no schema components' % (ns.uri(),)
1017 done_check.add(ns)
1018 assert done_check == self.__namespaceGraph.nodes()
1019
1020 return self.__namespaceGraph
1021 __namespaceGraph = None
1022
1025
1026 def siblingsFromGraph (self, reset=False):
1027 siblings = set()
1028 ns_graph = self.namespaceGraph(reset)
1029 for ns in self.__rootNamespaces:
1030 ns_siblings = ns_graph.sccMap().get(ns)
1031 if ns_siblings is not None:
1032 siblings.update(ns_siblings)
1033 else:
1034 siblings.add(ns)
1035 return siblings
1036
1037 def siblingNamespaces (self):
1038 if self.__siblingNamespaces is None:
1039 self.__siblingNamespaces = self.siblingsFromGraph()
1040 return self.__siblingNamespaces
1041
1042 def setSiblingNamespaces (self, sibling_namespaces):
1043 self.__siblingNamespaces = sibling_namespaces
1044
1045 __siblingNamespaces = None
1046
1049
1050 def componentGraph (self, reset=False):
1051 if reset or (self.__componentGraph is None):
1052 self.__componentGraph = pyxb.utils.utility.Graph()
1053 all_components = set()
1054 for ns in self.siblingNamespaces():
1055 [ all_components.add(_c) for _c in ns.components() if _c.hasBinding() ]
1056
1057 need_visit = all_components.copy()
1058 while 0 < len(need_visit):
1059 c = need_visit.pop()
1060 self.__componentGraph.addNode(c)
1061 for cd in c.bindingRequires(include_lax=True):
1062 if cd in all_components:
1063 self.__componentGraph.addEdge(c, cd)
1064 return self.__componentGraph
1065 __componentGraph = None
1066
1069
1070 def __init__ (self, **kw):
1071 namespace_set = set(kw.get('namespace_set', []))
1072 namespace = kw.get('namespace')
1073 if namespace is not None:
1074 namespace_set.add(namespace)
1075 if 0 == len(namespace_set):
1076 raise pyxb.LogicError('NamespaceDependencies requires at least one root namespace')
1077 self.__rootNamespaces = namespace_set
1078
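# Illustrative sketch (not part of the original module): ordering the
# namespaces a generated binding depends on, much as a binding generator
# might.  'target_ns' is a hypothetical pyxb.namespace.Namespace instance.
#
#   deps = NamespaceDependencies(namespace=target_ns)
#   for scc in deps.namespaceGraph().sccOrder():
#       print 'namespace group: %s' % (scc,)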
1079
1080 ## Local Variables:
1081 ## fill-column:78
1082 ## End:
1083