1 # Copyright 2009, Peter A. Bigot
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"); you may
4 # not use this file except in compliance with the License. You may obtain a
5 # copy of the License at:
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12 # License for the specific language governing permissions and limitations
13 # under the License.
14
15 """Classes and global objects related to archiving U{XML
16 Namespaces<http://www.w3.org/TR/2006/REC-xml-names-20060816/index.html>}."""
17
18 import pyxb
19 import os
20 import fnmatch
21 import pyxb.utils.utility
22 import utility
23
24 PathEnvironmentVariable = 'PYXB_ARCHIVE_PATH'
25 """Environment variable from which default path to pre-loaded namespaces is
26 read. The value should be a colon-separated list of absolute paths. The
27 character C{&} at the start of a member of the list is replaced by the path to
28 the directory where the C{pyxb} modules are found, including a trailing C{/}.
29 For example, use C{&pyxb/bundles//} to enable search of any archive bundled
30 with PyXB.
31
32 @note: If you put a path separator between C{&} and the following path, the
33 substitution will be ignored."""
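# Usage sketch (the paths below are hypothetical): the archive path is
# normally supplied through the environment before generated bindings are
# imported; '&pyxb/bundles//' enables recursive search of the bundled
# archives as described above.
#
#   export PYXB_ARCHIVE_PATH='&pyxb/bundles//:/usr/local/share/pyxb/archives'
#
# or, from Python, before the bindings are loaded:
#
#   import os
#   os.environ.setdefault('PYXB_ARCHIVE_PATH', '&pyxb/bundles//')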
34
35 import os.path
36 import stat
37
38 DefaultArchivePrefix = os.path.realpath(os.path.join(os.path.dirname( __file__), '../..'))
39 """The default archive prefix, substituted for C{&} in C{PYXB_ARCHIVE_PATH}."""
42 """Return the archive path as defined by the L{PathEnvironmentVariable},
43 or C{None} if that variable is not defined."""
44 import os
45 return os.environ.get(PathEnvironmentVariable)
46
47 # Stuff required for pickling
48 import cPickle as pickle
49 #import pyxb.utils.pickle_trace as pickle
50
51 import re
52
53 class NamespaceArchive (object):
54 """Represent a file from which one or more namespaces can be read, or to
55 which they will be written."""
56
57 # A code used to identify the format of the archive, so we don't
58 # mis-interpret its contents.
59 # YYYYMMDDHHMM
60 __PickleFormat = '200907190858'
61
62 @classmethod
64 """The category name to use when storing references to anonymous type
65 definitions. For example, attribute definitions defined within an
66 attribute use in a model group definition.that can be referenced frojm
67 ax different namespace."""
68 return cls.__AnonymousCategory
69 __AnonymousCategory = '_anonymousTypeDefinition'
70
71 @classmethod
73 """Return a reference to a set specifying the namespace instances that
74 are being archived.
75
76 This is needed to determine whether a component must be serialized as
77 a reference."""
78 # NB: Use root class explicitly. If we use cls, when this is invoked
79 # by subclasses it gets mangled using the subclass name so the one
80 # defined in this class is not found
81 return NamespaceArchive.__PicklingArchive
82 # Class variable recording the namespace that is currently being
83 # pickled. Used to prevent storing components that belong to
84 # other namespaces. Should be None unless within an invocation of
85 # SaveToFile.
86 __PicklingArchive = None
87
88 __NamespaceArchives = None
89 """A mapping from generation UID to NamespaceArchive instances."""
90
91 def discard (self):
92 """Remove this archive from the set of available archives.
93
94 This is invoked when an archive contains a namespace that the user has
95 specified should not be loaded."""
96 del self.__NamespaceArchives[self.generationUID()]
97 for ns in self.__namespaces:
98 ns._removeArchive(self)
99
100 @classmethod
101 def __GetArchiveInstance (cls, archive_file, stage=None):
102 """Return a L{NamespaceArchive} instance associated with the given file.
103
104 To the extent possible, the same file accessed through different paths
105 returns the same L{NamespaceArchive} instance.
106 """
107
108 nsa = NamespaceArchive(archive_path=archive_file, stage=cls._STAGE_uid)
109 rv = cls.__NamespaceArchives.get(nsa.generationUID(), nsa)
110 if rv == nsa:
111 cls.__NamespaceArchives[rv.generationUID()] = rv
112 rv._readToStage(stage)
113 return rv
114
115 __ArchivePattern_re = re.compile('\.wxs$')
116
117 @classmethod
118 def PreLoadArchives (cls, archive_path=None, required_archive_files=None, reset=False):
119 """Scan for available archives, associating them with namespaces.
120
121 This only validates potential archive contents; it does not load
122 namespace data from the archives. If invoked with no arguments, the
123 results of the most recent scan are reused (see C{reset}).
124 @keyword archive_path: A colon-separated list of files or directories in
125 which namespace archives can be found; see L{PathEnvironmentVariable}.
126 Defaults to L{GetArchivePath()}. If not defaulted, C{reset} will be
127 forced to C{True}. For any directory in the path, all files ending with
128 C{.wxs} are examined.
129
130 @keyword required_archive_files: A list of paths to files that must
131 resolve to valid namespace archives.
132
133 @keyword reset: If C{False} (default), the most recently read set of
134 archives is returned; if C{True}, the archive path is re-scanned and the
135 namespace associations validated.
136
137 @return: A list of L{NamespaceArchive} instances corresponding to the
138 members of C{required_archive_files}, in order. If
139 C{required_archive_files} was not provided, returns an empty list.
140
141 @raise pickle.UnpicklingError: a C{required_archive_files} member does not
142 contain a valid namespace archive.
143 """
144
145 import builtin
146
147 reset = reset or (archive_path is not None) or (required_archive_files is not None) or (cls.__NamespaceArchives is None)
148 required_archives = []
149 if reset:
150 # Get a list of pre-existing archives, initializing the map if
151 # this is the first time through.
152 if cls.__NamespaceArchives is None:
153 cls.__NamespaceArchives = { }
154 existing_archives = set(cls.__NamespaceArchives.values())
155 archive_set = set(required_archives)
156
157 # Get archives for all required files
158 if required_archive_files is not None:
159 for afn in required_archive_files:
160 required_archives.append(cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules))
161
162 # Ensure we have an archive path. If not, don't do anything.
163 if archive_path is None:
164 archive_path = GetArchivePath()
165 if archive_path is not None:
166
167 # Get archive instances for everything in the archive path
168 candidate_files = pyxb.utils.utility.GetMatchingFiles(archive_path, cls.__ArchivePattern_re, prefix_pattern='&', prefix_substituend=DefaultArchivePrefix)
169 for afn in candidate_files:
170 #print 'Considering %s' % (afn,)
171 try:
172 nsa = cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules)
173 archive_set.add(nsa)
174 except pickle.UnpicklingError, e:
175 print 'Cannot use archive %s: %s' % (afn, e)
176 except pyxb.NamespaceArchiveError, e:
177 print 'Cannot use archive %s: %s' % (afn, e)
178
179 # Do this for two reasons: first, to get an iterable that won't
180 # cause problems when we remove unresolvable archives from
181 # archive_set; and second to aid with forced dependency inversion
182 # testing
183 ordered_archives = sorted(list(archive_set), lambda _a,_b: cmp(_a.archivePath(), _b.archivePath()))
184 ordered_archives.reverse()
185
186 # Create a graph that identifies dependencies between the archives
187 archive_map = { }
188 for a in archive_set:
189 archive_map[a.generationUID()] = a
190 archive_graph = pyxb.utils.utility.Graph()
191 for a in ordered_archives:
192 prereqs = a._unsatisfiedModulePrerequisites()
193 if 0 < len(prereqs):
194 for p in prereqs:
195 if builtin.BuiltInObjectUID == p:
196 continue
197 da = archive_map.get(p)
198 if da is None:
199 print 'WARNING: %s depends on unavailable archive %s' % (a, p)
200 archive_set.remove(a)
201 else:
202 #print '%s depends on %s' % (a, da)
203 archive_graph.addEdge(a, da)
204 else:
205 #print '%s has no dependencies' % (a,)
206 archive_graph.addRoot(a)
207
208 # Verify that there are no dependency loops.
209 archive_scc = archive_graph.sccOrder()
210 for scc in archive_scc:
211 if 1 < len(scc):
212 raise pyxb.LogicError("Cycle in archive dependencies. How'd you do that?\n " + "\n ".join([ _a.archivePath() for _a in scc ]))
213 archive = scc[0]
214 if not (archive in archive_set):
215 #print 'Discarding unresolvable %s' % (archive,)
216 archive.discard()
217 existing_archives.remove(archive)
218 continue
219 #print 'Completing load of %s' % (archive,)
220 #archive._readToStage(cls._STAGE_COMPLETE)
221
222 # Discard any archives that we used to know about but now aren't
223 # supposed to. @todo make this friendlier in the case of archives
224 # we've already incorporated.
225 for archive in existing_archives.difference(archive_set):
226 print 'Discarding excluded archive %s' % (archive,)
227 archive.discard()
228
229 return required_archives
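# Usage sketch (file names are hypothetical): make required archives known
# before schema processing begins.  With no arguments, PreLoadArchives()
# scans PYXB_ARCHIVE_PATH (via GetArchivePath()) for *.wxs files.
#
#   from pyxb.namespace.archive import NamespaceArchive
#   NamespaceArchive.PreLoadArchives()
#   # or insist on specific archive files, forcing a fresh scan:
#   archives = NamespaceArchive.PreLoadArchives(
#       required_archive_files=['common.wxs'])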
230
234 __archivePath = None
235
236 def generationUID (self):
237 """The unique identifier for the generation that produced this archive."""
238 return self.__generationUID
239 __generationUID = None
240
242 """Return C{True} iff it is permissible to load the archive.
243 Archives created for output cannot be loaded."""
244 return self.__isLoadable
245 __isLoadable = None
246
248 self.__moduleRecords = set()
249 namespaces = set()
250 for ns in pyxb.namespace.utility.AvailableNamespaces():
251 # @todo allow these; right now it's usually the XML
252 # namespace and we're not prepared to reconcile
253 # redefinitions of those components.
254 if ns.isUndeclaredNamespace():
255 continue
256 mr = ns.lookupModuleRecordByUID(self.generationUID())
257 if mr is not None:
258 namespaces.add(ns)
259 mr.prepareForArchive(self)
260 self.__moduleRecords.add(mr)
261 self.__namespaces.update(namespaces)
263 """Return the set of L{module records <ModuleRecord>} stored in this
264 archive.
265
266 Each module record represents the contribution of a single generation to one namespace."""
267 return self.__moduleRecords
268 __moduleRecords = None
269
270 @classmethod
272 """Return the L{NamespaceArchive} instance that can be found at the
273 given path."""
274 return cls.__GetArchiveInstance(archive_file)
275
276 # States in the finite automaton that is used to read archive contents.
277 _STAGE_UNOPENED = 0 # Haven't even checked for existence
278 _STAGE_uid = 1 # Verified archive exists, obtained generation UID from it
279 _STAGE_readModules = 2 # Read module records from archive, which includes UID dependences
280 _STAGE_validateModules = 3 # Verified pre-requisites for module loading
281 _STAGE_readComponents = 4 # Extracted components from archive and integrated into namespaces
282 _STAGE_COMPLETE = _STAGE_readComponents
283
285 return self.__stage
286 __stage = None
287
289 """Create a new namespace archive.
290
291 If C{namespaces} is given, this is an output archive.
292
293 If C{namespaces} is absent, this is an input archive.
294
295 @raise IOError: error attempting to read the archive file
296 @raise pickle.UnpicklingError: something is wrong with the format of the library
297 """
298 self.__namespaces = set()
299 if generation_uid is not None:
300 if archive_path:
301 raise pyxb.LogicError('NamespaceArchive: cannot define both namespaces and archive_path')
302 self.__generationUID = generation_uid
303 self.__locateModuleRecords()
304 elif archive_path is not None:
305 if generation_uid is not None:
306 raise pyxb.LogicError('NamespaceArchive: cannot provide generation_uid with archive_path')
307 self.__archivePath = archive_path
308 self.__stage = self._STAGE_UNOPENED
309 self.__isLoadable = loadable
310 if self.__isLoadable:
311 if stage is None:
312 stage = self._STAGE_readModules
313 self._readToStage(stage)
314 else:
315 pass
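# Construction sketch (the path and UID below are hypothetical): an archive
# is either read back from a file or populated for the current generation,
# using the keyword arguments documented above.
#
#   nsa_in = NamespaceArchive(archive_path='bindings.wxs')       # input archive
#   nsa_out = NamespaceArchive(generation_uid=generation_uid)    # output archive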
316
317 def add (self, namespace):
318 """Add the given namespace to the set that is to be stored in this archive."""
319 if namespace.isAbsentNamespace():
320 raise pyxb.NamespaceArchiveError('Cannot archive absent namespace')
321 self.__namespaces.add(namespace)
322
324 """Add the given namespaces to the set that is to be stored in this archive."""
325 [ self.add(_ns) for _ns in namespace_set ]
326
330 __namespaces = None
331
333 # @todo: support StringIO instances?
334 if not isinstance(output, file):
335 output = open(output, 'wb')
336 pickler = pickle.Pickler(output, -1)
337
338 # The format of the archive
339 pickler.dump(NamespaceArchive.__PickleFormat)
340
341 # The UID for the set
342 assert self.generationUID() is not None
343 pickler.dump(self.generationUID())
344
345 return pickler
346
348 unpickler = pickle.Unpickler(open(self.__archivePath, 'rb'))
349
350 format = unpickler.load()
351 if self.__PickleFormat != format:
352 raise pyxb.NamespaceArchiveError('Archive format is %s, require %s' % (format, self.__PickleFormat))
353
354 self.__generationUID = unpickler.load()
355
356 return unpickler
357
359 #print 'RM %x %s' % (id(self), self)
360 mrs = unpickler.load()
361 assert isinstance(mrs, set), 'Expected set got %s from %s' % (type(mrs), self.archivePath())
362 if self.__moduleRecords is None:
363 for mr in mrs.copy():
364 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
365 if mr2 is not None:
366 mr2._setFromOther(mr, self)
367 #print 'Replaced locally defined %s with archive data' % (mr2,)
368 mrs.remove(mr)
369 self.__moduleRecords = set()
370 assert 0 == len(self.__namespaces)
371 for mr in mrs:
372 mr._setArchive(self)
373 ns = mr.namespace()
374 ns.addModuleRecord(mr)
375 self.__namespaces.add(ns)
376 self.__moduleRecords.add(mr)
377 else:
378 # Verify the archive still has what was in it when we created this.
379 for mr in mrs:
380 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
381 if not (mr2 in self.__moduleRecords):
382 raise pyxb.NamespaceArchiveError('Lost module record %s %s from %s' % (mr.namespace(), mr.generationUID(), self.archivePath()))
383
385 prereq_uids = set()
386 for mr in self.__moduleRecords:
387 ns = mr.namespace()
388 #print 'Namespace %s records:' % (ns,)
389 #for xmr in ns.moduleRecords():
390 # print ' %s' % (xmr,)
391 prereq_uids.update(mr.dependsOnExternal())
392 return prereq_uids
393
395 import builtin
396 prereq_uids = self._unsatisfiedModulePrerequisites()
397 #print '%s depends on %d prerequisites' % (self, len(prereq_uids))
398 for uid in prereq_uids:
399 if builtin.BuiltInObjectUID == uid:
400 continue
401 depends_on = self.__NamespaceArchives.get(uid)
402 if depends_on is None:
403 raise pyxb.NamespaceArchiveError('%s: archive depends on unavailable archive %s' % (self.archivePath(), uid))
404 #print '%s stage %s depends on %s at %s going to %s' % (self, self._stage(), depends_on, depends_on._stage(), stage)
405 depends_on._readToStage(stage)
406
408 self.__validatePrerequisites(self._STAGE_validateModules)
409 for mr in self.__moduleRecords:
410 ns = mr.namespace()
411 #print 'Namespace %s records:' % (ns,)
412 #for xmr in ns.moduleRecords():
413 # print ' %s' % (xmr,)
414 for base_uid in mr.dependsOnExternal():
415 xmr = ns.lookupModuleRecordByUID(base_uid)
416 if xmr is None:
417 raise pyxb.NamespaceArchiveError('Module %s depends on external module %s, not available in archive path' % (mr.generationUID(), base_uid))
418 if not xmr.isIncorporated():
419 print 'Need to incorporate data from %s' % (xmr,)
420 else:
421 print 'Have required base data %s' % (xmr,)
422
423 for origin in mr.origins():
424 #print 'mr %s origin %s' % (mr, origin)
425 for (cat, names) in origin.categoryMembers().iteritems():
426 if not (cat in ns.categories()):
427 continue
428 cross_objects = names.intersection(ns.categoryMap(cat).keys())
429 if 0 < len(cross_objects):
430 raise pyxb.NamespaceArchiveError('Archive %s namespace %s module %s origin %s archive/active conflict on category %s: %s' % (self.__archivePath, ns, mr, origin, cat, " ".join(cross_objects)))
431 print '%s no conflicts on %d names' % (cat, len(names))
432
434 self.__validatePrerequisites(self._STAGE_readComponents)
435 print 'RCS %s' % (self,)
436 for n in range(len(self.__moduleRecords)):
437 ns = unpickler.load()
438 mr = ns.lookupModuleRecordByUID(self.generationUID())
439 assert mr in self.__moduleRecords
440 assert not mr.isIncorporated()
441 objects = unpickler.load()
442 mr._loadCategoryObjects(objects)
443
444 __unpickler = None
446 if self.__stage is None:
447 raise pyxb.NamespaceArchiveError('Attempt to read from invalid archive %s' % (self,))
448 try:
449 while self.__stage < stage:
450 #print 'RTS %s want %s' % (self.__stage, stage)
451 if self.__stage < self._STAGE_uid:
452 self.__unpickler = self.__createUnpickler()
453 self.__stage = self._STAGE_uid
454 continue
455 if self.__stage < self._STAGE_readModules:
456 assert self.__unpickler is not None
457 self.__readModules(self.__unpickler)
458 self.__stage = self._STAGE_readModules
459 continue
460 if self.__stage < self._STAGE_validateModules:
461 self.__validateModules()
462 self.__stage = self._STAGE_validateModules
463 continue
464 if self.__stage < self._STAGE_readComponents:
465 assert self.__unpickler is not None
466 self.__stage = self._STAGE_readComponents
467 self.__readComponentSet(self.__unpickler)
468 self.__unpickler = None
469 continue
470 raise pyxb.LogicError('Too many stages (at %s, want %s)' % (self.__stage, stage))
471 except:
472 self.__stage = None
473 self.__unpickler = None
474 raise
475
477 """Read all the components from this archive, integrating them into
478 their respective namespaces."""
479 self._readToStage(self._STAGE_COMPLETE)
480
482 """Store the namespaces into the archive.
483
484 @param output: An instance substitutable for a writable file, or the
485 name of a file to write to.
486 """
487 import sys
488
489 assert NamespaceArchive.__PicklingArchive is None
490 NamespaceArchive.__PicklingArchive = self
491 assert self.__moduleRecords is not None
492
493 # Recalculate the record/object associations: we didn't assign
494 # anonymous names to the indeterminate scope objects because they
495 # weren't needed for bindings, but they are needed in the archive.
496 for mr in self.__moduleRecords:
497 mr.namespace()._associateOrigins(mr)
498
499 try:
500 # See http://bugs.python.org/issue3338
501 recursion_limit = sys.getrecursionlimit()
502 sys.setrecursionlimit(10 * recursion_limit)
503
504 pickler = self.__createPickler(output)
505
506 assert isinstance(self.__moduleRecords, set)
507 print "\n".join([ str(_mr) for _mr in self.__moduleRecords ])
508 pickler.dump(self.__moduleRecords)
509
510 for mr in self.__moduleRecords:
511 pickler.dump(mr.namespace())
512 pickler.dump(mr.categoryObjects())
513 finally:
514 sys.setrecursionlimit(recursion_limit)
515 NamespaceArchive.__PicklingArchive = None
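# Round-trip sketch ('bindings.wxs' is a hypothetical path; the names
# OpenArchive, writeNamespaces and readNamespaces correspond to the store and
# read operations documented above, and are assumptions insofar as their
# declarations are not shown in this listing).
#
#   out = NamespaceArchive(generation_uid=generation_uid)
#   for ns in namespaces_to_store:
#       out.add(ns)
#   out.writeNamespaces('bindings.wxs')
#
#   # Later, with the file reachable through PYXB_ARCHIVE_PATH:
#   nsa = NamespaceArchive.OpenArchive('bindings.wxs')
#   nsa.readNamespaces()   # integrate archived components into their namespaces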
516
518 archive_path = self.__archivePath
519 if archive_path is None:
520 archive_path = '??'
521 return 'NSArchive@%s' % (archive_path,)
522
524 """Mix-in to any object that can be stored in a namespace within an archive."""
525
526 # Need to set this per category item
527 __objectOrigin = None
529 return self.__objectOrigin
531 if (self.__objectOrigin is not None) and (not override):
532 if self.__objectOrigin != object_origin:
533 raise pyxb.LogicError('Inconsistent origins for object %s: %s %s' % (self, self.__objectOrigin, object_origin))
534 else:
535 self.__objectOrigin = object_origin
536
538 #assert self.__objectOrigin is not None
539 if self._objectOrigin() is not None:
540 return getattr(super(_ArchivableObject_mixin, self), '_prepareForArchive_csc', lambda *_args,**_kw: self)(self._objectOrigin().moduleRecord())
541 assert not isinstance(self, pyxb.xmlschema.structures._NamedComponent_mixin)
542
544 return getattr(super(_ArchivableObject_mixin, self), '_updateFromOther_csc', lambda *_args,**_kw: self)(other)
545
547 """Update this instance with additional information provided by the other instance.
548
549 This is used, for example, when a built-in type is already registered
550 in the namespace, but we've processed the corresponding schema and
551 have obtained more details."""
552 assert self != other
553 return self._updateFromOther_csc(other)
554
556 import builtin
557 assert self._objectOrigin()
558 return builtin.BuiltInObjectUID == self._objectOrigin().generationUID()
559
561 """Encapsulate the operations and data relevant to archiving namespaces.
562
563 This class is mixed into L{pyxb.namespace.Namespace}."""
564
566 """CSC extension to reset fields of a Namespace.
567
568 This one handles category-related data."""
569 getattr(super(_NamespaceArchivable_mixin, self), '_reset', lambda *args, **kw: None)()
570 self.__loadedFromArchive = None
571 self.__wroteToArchive = None
572 self.__active = False
573 self.__moduleRecordMap = {}
574
576 return self.__loadedFromArchive
577
578 __wroteToArchive = None
579 __loadedFromArchive = None
580
582 if self.__isActive and empty_inactive:
583 for (ct, cm) in self._categoryMap().items():
584 if 0 < len(cm):
585 print '%s: %d %s -- activated' % (self, len(cm), ct)
586 return True
587 return False
588 return self.__isActive
589
594 __isActive = None
595
598
604
606 # Yes, I do want this to raise KeyError if the archive is not present
607 mr = self.__moduleRecordMap[archive.generationUID()]
608 assert not mr.isIncorporated(), 'Removing archive %s after incorporation' % (archive.archivePath(),)
609 # print 'removing %s' % (mr,)
610 del self.__moduleRecordMap[archive.generationUID()]
611
613 """Return C{True} iff the component model for this namespace can be
614 loaded from a namespace archive."""
615 for mr in self.moduleRecords():
616 if mr.isLoadable():
617 return True
618 return False
619
621 """Return the list of archives from which components for this
622 namespace can be loaded."""
623 rv = []
624 for mr in self.moduleRecords():
625 if mr.isLoadable():
626 rv.append(mr.archive())
627 return rv
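# Query sketch (the URI is hypothetical): a namespace's module records expose
# whether its component model can still be pulled from an archive.
#
#   import pyxb.namespace
#   ns = pyxb.namespace.NamespaceForURI('urn:example:po', create_if_missing=True)
#   for mr in ns.moduleRecords():
#       if mr.isLoadable():
#           print 'loadable from %s' % (mr.archive().archivePath(),)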
628
631 __moduleRecordMap = None
632
634 assert isinstance(module_record, ModuleRecord)
635 # This assertion will fail if the binding is loaded before its archive
636 # is scanned.
637 assert not (module_record.generationUID() in self.__moduleRecordMap)
638 self.__moduleRecordMap[module_record.generationUID()] = module_record
639 return module_record
641 rv = self.__moduleRecordMap.get(generation_uid)
642 if (rv is None) and create_if_missing:
643 rv = self.addModuleRecord(ModuleRecord(self, generation_uid, *args, **kw))
644 return rv
645
647 #assert not self.__isActive, 'ERROR: State set for active namespace %s' % (self,)
648 return getattr(super(_NamespaceArchivable_mixin, self), '_getState_csc', lambda _kw: _kw)(kw)
649
651 """Prevent loading this namespace from an archive.
652
653 This marks all archives in which the namespace appears, whether
654 publicly or privately, as not loadable."""
655 if self._loadedFromArchive():
656 raise pyxb.NamespaceError(self, 'cannot mark not loadable when already loaded')
657 for mr in self.moduleRecords():
658 mr._setIsLoadable(False)
659
661 __PrivateTransient = set()
662
664 return self.__namespace
665 __namespace = None
666
668 return self.__archive
672 __archive = None
673 __PrivateTransient.add('archive')
674
676 return self.__isPublic
680 __isPublic = None
681
685 assert self.__isLoadable
686 self.__isIncorporated = True
687 self.__isLoadable = False
688 return self
689 __isIncorporated = None
690 __PrivateTransient.add('isIncorporated')
691
697 __isLoadable = None
698
700 return self.__generationUID
701 __generationUID = None
702
706 assert isinstance(origin, _ObjectOrigin)
707 assert not (origin.signature() in self.__originMap)
708 self.__originMap[origin.signature()] = origin
709 return origin
713 if self.__originMap is None:
714 self.__originMap = {}
715 else:
716 self.__originMap.clear()
717 [ self.addOrigin(_o) for _o in origins ]
718 return self
719 __originMap = None
720
726
728 return self.__modulePath
730 assert (module_path is None) or isinstance(module_path, basestring)
731 self.__modulePath = module_path
732 return self
733 __modulePath = None
734
736 return self.__module
738 self.__module = module
739 # This is a nice idea, but screws up the unit tests that already have
740 # ns1 and the like logged as expected prefixes. Only causes a problem
741 # when the tests are run individually; dunno why.
742 #ns = self.namespace()
743 #if (ns.prefix() is None) and (module is not None):
744 # try:
745 # ns.setPrefix(os.path.basename(os.path.normpath(module.__file__)).split('.')[0])
746 # except AttributeError:
747 # pass
748 return self
749 __module = None
750 __PrivateTransient.add('module')
751
753 return self.__referencedNamespaces
760 __referencedNamespaces = None
761
762 __constructedLocally = False
763 __PrivateTransient.add('constructedLocally')
764
766 import builtin
767
768 super(ModuleRecord, self).__init__()
769 self.__namespace = namespace
770 #print 'Created MR for %s gen %s' % (namespace, generation_uid)
771 assert (generation_uid != builtin.BuiltInObjectUID) or namespace.isBuiltinNamespace()
772 self.__isPublic = kw.get('is_public', False)
773 self.__isIncorporated = kw.get('is_incorporated', False)
774 self.__isLoadable = kw.get('is_loadable', True)
775 assert isinstance(generation_uid, pyxb.utils.utility.UniqueIdentifier)
776 self.__generationUID = generation_uid
777 self.__modulePath = kw.get('module_path')
778 self.__module = kw.get('module')
779 self.__originMap = {}
780 self.__referencedNamespaces = set()
781 self.__categoryObjects = { }
782 self.__constructedLocally = True
783 self.__dependsOnExternal = set()
784
786 if (not self.__constructedLocally) or other.__constructedLocally:
787 raise pyxb.ImplementationError('Module record update requires local to be updated from archive')
788 assert self.__generationUID == other.__generationUID
789 assert self.__archive is None
790 self.__isPublic = other.__isPublic
791 assert not self.__isIncorporated
792 self.__isLoadable = other.__isLoadable
793 self.__modulePath = other.__modulePath
794 # self.__module already set correctly
795 self.__originMap.update(other.__originMap)
796 self.__referencedNamespaces.update(other.__referencedNamespaces)
797 if not (other.__categoryObjects is None):
798 self.__categoryObjects.update(other.__categoryObjects)
799 self.__dependsOnExternal.update(other.__dependsOnExternal)
800 self._setArchive(archive)
801
803 return self.__categoryObjects
805 self.__categoryObjects.clear()
806 for origin in self.origins():
807 origin.resetCategoryMembers()
812 assert self.__categoryObjects is None
813 assert not self.__constructedLocally
814 ns = self.namespace()
815 ns.configureCategories(category_objects.keys())
816 for (cat, obj_map) in category_objects.iteritems():
817 current_map = ns.categoryMap(cat)
818 for (local_name, component) in obj_map.iteritems():
819 existing_component = current_map.get(local_name)
820 if existing_component is None:
821 current_map[local_name] = component
822 elif existing_component._allowUpdateFromOther(component):
823 existing_component._updateFromOther(component)
824 else:
825 raise pyxb.NamespaceError(self, 'Load attempted to override %s %s in %s' % (cat, local_name, self.namespace()))
826 self.markIncorporated()
827 __categoryObjects = None
828 __PrivateTransient.add('categoryObjects')
829
831 return self.__dependsOnExternal
832 __dependsOnExternal = None
833
835 assert self.archive() is None
836 self._setArchive(archive)
837 ns = self.namespace()
838 self.__dependsOnExternal.clear()
839 for mr in ns.moduleRecords():
840 if mr != self:
841 print 'This gen depends on %s' % (mr,)
842 self.__dependsOnExternal.add(mr.generationUID())
843 for obj in ns._namedObjects().union(ns.components()):
844 if isinstance(obj, _ArchivableObject_mixin):
845 if obj._objectOrigin():
846 obj._prepareForArchive(self)
847 #print 'Archive %s ns %s module %s has %d origins' % (self.archive(), self.namespace(), self, len(self.origins()))
848
850 self.namespace()._transferReferencedNamespaces(self)
851 self.namespace()._associateOrigins(self)
852
855
857 """Marker class for objects that can serve as an origin for an object in a
858 namespace."""
859 __PrivateTransient = set()
860
862 return self.__signature
863 __signature = None
864
866 return self.__moduleRecord
867 __moduleRecord = None
868
871
874
876 self.__signature = kw.pop('signature', None)
877 super(_ObjectOrigin, self).__init__(**kw)
878 self.__moduleRecord = namespace.lookupModuleRecordByUID(generation_uid, create_if_missing=True, **kw)
879 self.__moduleRecord.addOrigin(self)
880 self.__categoryMembers = { }
881 self.__categoryObjectMap = { }
882
884 self.__categoryMembers.clear()
885 self.__categoryObjectMap.clear()
886 self.__originatedObjects = None
888 self.__categoryMembers.setdefault(category, set()).add(name)
889 self.__categoryObjectMap.setdefault(category, {})[name] = obj
890 self.__moduleRecord._addCategoryObject(category, name, obj)
892 return self.__categoryMembers
894 if self.__originatedObjects is None:
895 components = set()
896 [ components.update(_v.values()) for _v in self.__categoryObjectMap.itervalues() ]
897 self.__originatedObjects = frozenset(components)
898 return self.__originatedObjects
899
900 # The set of category names associated with objects. Don't throw this
901 # away and use categoryObjectMap.keys() instead: that's transient, and we
902 # need this to have a value when read from an archive.
903 __categoryMembers = None
904
905 # Map from category name to a map from an object name to the object
906 __categoryObjectMap = None
907 __PrivateTransient.add('categoryObjectMap')
908
909 # The set of objects that originated at this origin
910 __originatedObjects = None
911 __PrivateTransient.add('originatedObjects')
912
914 """Holds the data regarding components derived from a single schema.
915
916 Coupled to a particular namespace through the
917 L{_NamespaceComponentAssociation_mixin}.
918 """
919
920 __PrivateTransient = set()
921
923 schema = kw.get('schema')
924 if schema is not None:
925 assert not ('location' in kw)
926 kw['location'] = schema.location()
927 assert not ('signature' in kw)
928 kw['signature'] = schema.signature()
929 assert not ('generation_uid' in kw)
930 kw['generation_uid'] = schema.generationUID()
931 assert not ('namespace' in kw)
932 kw['namespace'] = schema.targetNamespace()
933 assert not ('version' in kw)
934 kw['version'] = schema.schemaAttribute('version')
935
937 """Determine whether this record matches the parameters.
938
939 @keyword schema: a L{pyxb.xmlschema.structures.Schema} instance from
940 which the other parameters are obtained.
941 @keyword location: a schema location (URI)
942 @keyword signature: a schema signature
943 @return: C{True} iff I{either} C{location} or C{signature} matches."""
944 self.__setDefaultKW(kw)
945 location = kw.get('location')
946 if (location is not None) and (self.location() == location):
947 return True
948 signature = kw.get('signature')
949 if (signature is not None) and (self.signature() == signature):
950 return True
951 return False
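# Matching sketch (values are hypothetical; the method name match() is an
# assumption taken from the docstring above, since its declaration is not
# shown): a record matches when either the schema location or the schema
# signature agrees.
#
#   origin.match(location='http://example.com/po.xsd')
#   origin.match(signature='0123456789abcdef')
#   origin.match(schema=parsed_schema)   # location/signature taken from the schema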
952
954 return self.__location
955 __location = None
956
958 return self.__schema
959 __schema = None
960 __PrivateTransient.add('schema')
961
963 return self.__version
964 __version = None
965
967 self.__setDefaultKW(kw)
968 self.__schema = kw.pop('schema', None)
969 self.__location = kw.pop('location', None)
970 self.__version = kw.pop('version', None)
971 super(_SchemaOrigin, self).__init__(kw.pop('namespace'), kw.pop('generation_uid'), **kw)
972
979
981 """Return a list of namespace URIs for which we may be able to load the
982 namespace contents from a pre-parsed file. The corresponding L{Namespace}
983 can be retrieved using L{NamespaceForURI}, and the declared objects in
984 that namespace loaded with L{Namespace.validateComponentModel}.
985
986 Note that success of the load is not guaranteed if the packed file
987 is not compatible with the schema class being used."""
988 # Invoke this to ensure we have searched for loadable namespaces
989 return _LoadableNamespaceMap().keys()
990
992
994 return self.__rootNamespaces
995 __rootNamespaces = None
996
998 if reset or (self.__namespaceGraph is None):
999 self.__namespaceGraph = pyxb.utils.utility.Graph()
1000 map(self.__namespaceGraph.addRoot, self.rootNamespaces())
1001
1002 # Make sure all referenced namespaces have valid components
1003 need_check = self.__rootNamespaces.copy()
1004 done_check = set()
1005 while 0 < len(need_check):
1006 ns = need_check.pop()
1007 ns.validateComponentModel()
1008 self.__namespaceGraph.addNode(ns)
1009 for rns in ns.referencedNamespaces().union(ns.importedNamespaces()):
1010 self.__namespaceGraph.addEdge(ns, rns)
1011 if not rns in done_check:
1012 need_check.add(rns)
1013 if not ns.hasSchemaComponents():
1014 print 'WARNING: Referenced %s has no schema components' % (ns.uri(),)
1015 done_check.add(ns)
1016 assert done_check == self.__namespaceGraph.nodes()
1017
1018 return self.__namespaceGraph
1019 __namespaceGraph = None
1020
1023
1025 siblings = set()
1026 ns_graph = self.namespaceGraph(reset)
1027 for ns in self.__rootNamespaces:
1028 ns_siblings = ns_graph.sccMap().get(ns)
1029 if ns_siblings is not None:
1030 siblings.update(ns_siblings)
1031 else:
1032 siblings.add(ns)
1033 return siblings
1034
1036 if self.__siblingNamespaces is None:
1037 self.__siblingNamespaces = self.siblingsFromGraph()
1038 return self.__siblingNamespaces
1039
1041 self.__siblingNamespaces = sibling_namespaces
1042
1043 __siblingNamespaces = None
1044
1047
1049 if reset or (self.__componentGraph is None):
1050 self.__componentGraph = pyxb.utils.utility.Graph()
1051 all_components = set()
1052 for ns in self.siblingNamespaces():
1053 [ all_components.add(_c) for _c in ns.components() if _c.hasBinding() ]
1054
1055 need_visit = all_components.copy()
1056 while 0 < len(need_visit):
1057 c = need_visit.pop()
1058 self.__componentGraph.addNode(c)
1059 for cd in c.bindingRequires(include_lax=True):
1060 if cd in all_components:
1061 self.__componentGraph.addEdge(c, cd)
1062 return self.__componentGraph
1063 __componentGraph = None
1064
1067
1069 namespace_set = set(kw.get('namespace_set', []))
1070 namespace = kw.get('namespace')
1071 if namespace is not None:
1072 namespace_set.add(namespace)
1073 if 0 == len(namespace_set):
1074 raise pyxb.LogicError('NamespaceDependencies requires at least one root namespace')
1075 self.__rootNamespaces = namespace_set
1076
1077
1078 ## Local Variables:
1079 ## fill-column:78
1080 ## End:
1081