1 # -*- coding: utf-8 -*-
2 # Copyright 2009-2012, Peter A. Bigot
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License"); you may
5 # not use this file except in compliance with the License. You may obtain a
6 # copy of the License at:
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 # License for the specific language governing permissions and limitations
14 # under the License.
15
16 """Classes and global objects related to archiving U{XML
17 Namespaces<http://www.w3.org/TR/2006/REC-xml-names-20060816/index.html>}."""
18
19 import pyxb
20 import os
21 import os.path
22 import pyxb.utils.utility
23 import logging
24
25 _log = logging.getLogger(__name__)
26
27 PathEnvironmentVariable = 'PYXB_ARCHIVE_PATH'
28 """Environment variable from which default path to pre-loaded namespaces is
29 read. The value should be a list of absolute paths separated by C{os.pathsep}.
30 The character C{&} at the start of a member of the list is replaced by the path
31 to the directory where the C{pyxb} modules are found, including a trailing C{/}.
32 For example, use C{&pyxb/bundles//} to enable search of any archive bundled
33 with PyXB.
34
35 @note: If you put a path separator between C{&} and the following path, this
36 will cause the substitution to be ignored."""
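# Illustrative setting (a sketch; the non-bundle path is hypothetical).  The
# C{&pyxb/bundles//} entry is the bundled-archive form described above:
#
#   import os
#   os.environ['PYXB_ARCHIVE_PATH'] = os.pathsep.join(
#       ['&pyxb/bundles//', '/opt/myproject/archives'])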
37
38 DefaultArchivePrefix = os.path.realpath(os.path.join(os.path.dirname( __file__), '../..'))
39 """The default archive prefix, substituted for C{&} in C{PYXB_ARCHIVE_PATH}."""
40
41 def GetArchivePath ():
42 """Return the archive path as defined by the L{PathEnvironmentVariable},
43 or C{None} if that variable is not defined."""
44 return os.environ.get(PathEnvironmentVariable)
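# Minimal usage sketch: fall back to a project directory (path hypothetical)
# when PYXB_ARCHIVE_PATH is not set.
#
#   archive_path = GetArchivePath()
#   if archive_path is None:
#       archive_path = '/opt/myproject/archives'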
45
46 # Stuff required for pickling
47 import cPickle as pickle
48 import re
49
50 class NamespaceArchive (object):
51 """Represent a file from which one or more namespaces can be read, or to
52 which they will be written."""
53
54 # A code used to identify the format of the archive, so we don't
55 # mis-interpret its contents.
56 # YYYYMMDDHHMM
57 __PickleFormat = '200907190858'
58
59 @classmethod
60 def _AnonymousCategory (cls):
61 """The category name to use when storing references to anonymous type
62 definitions. For example, attribute definitions defined within an
63 attribute use in a model group definition that can be referenced from
64 a different namespace."""
65 return cls.__AnonymousCategory
66 __AnonymousCategory = '_anonymousTypeDefinition'
67
68 @classmethod
69 def PicklingArchive (cls):
70 """Return a reference to a set specifying the namespace instances that
71 are being archived.
72
73 This is needed to determine whether a component must be serialized as
74 a reference."""
75 # NB: Use root class explicitly. If we use cls, when this is invoked
76 # by subclasses it gets mangled using the subclass name so the one
77 # defined in this class is not found
78 return NamespaceArchive.__PicklingArchive
79 # Class variable recording the namespace that is currently being
80 # pickled. Used to prevent storing components that belong to
81 # other namespaces. Should be None unless within an invocation of
82 # SaveToFile.
83 __PicklingArchive = None
84
85 __NamespaceArchives = None
86 """A mapping from generation UID to NamespaceArchive instances."""
87
88 def discard (self):
89 """Remove this archive from the set of available archives.
90
91 This is invoked when an archive contains a namespace that the user has
92 specified should not be loaded."""
93 del self.__NamespaceArchives[self.generationUID()]
94 for ns in self.__namespaces:
95 ns._removeArchive(self)
96
97 @classmethod
98 def __GetArchiveInstance (cls, archive_file, stage=None):
99 """Return a L{NamespaceArchive} instance associated with the given file.
100
101 To the extent possible, the same file accessed through different paths
102 returns the same L{NamespaceArchive} instance.
103 """
104
105 nsa = NamespaceArchive(archive_path=archive_file, stage=cls._STAGE_uid)
106 rv = cls.__NamespaceArchives.get(nsa.generationUID(), nsa)
107 if rv == nsa:
108 cls.__NamespaceArchives[rv.generationUID()] = rv
109 rv._readToStage(stage)
110 return rv
111
112 __ArchivePattern_re = re.compile('\.wxs$')
113
114 @classmethod
115 def PreLoadArchives (cls, archive_path=None, required_archive_files=None, reset=False):
116 """Scan for available archives, associating them with namespaces.
117
118 This only validates potential archive contents; it does not load
119 namespace data from the archives. If invoked with no arguments, the results of the most recent scan are reused.
120
121 @keyword archive_path: A list of files or directories in which
122 namespace archives can be found. The entries are separated by
123 os.pathsep, which is a colon on POSIX platforms and a semi-colon on
124 Windows. See L{PathEnvironmentVariable}. Defaults to
125 L{GetArchivePath()}. If not defaulted, C{reset} will be forced to
126 C{True}. For any directory in the path, all files ending with
127 C{.wxs} are examined.
128
129 @keyword required_archive_files: A list of paths to files that must
130 resolve to valid namespace archives.
131
132 @keyword reset: If C{False} (default), the most recently read set of
133 archives is returned; if C{True}, the archive path is re-scanned and the
134 namespace associations validated.
135
136 @return: A list of L{NamespaceArchive} instances corresponding to the
137 members of C{required_archive_files}, in order. If
138 C{required_archive_files} was not provided, returns an empty list.
139
140 @raise pickle.UnpicklingError: a C{required_archive_files} member does not
141 contain a valid namespace archive.
142 """
143
144 from pyxb.namespace import builtin
145
146 reset = reset or (archive_path is not None) or (required_archive_files is not None) or (cls.__NamespaceArchives is None)
147 required_archives = []
148 if reset:
149 # Get a list of pre-existing archives, initializing the map if
150 # this is the first time through.
151 if cls.__NamespaceArchives is None:
152 cls.__NamespaceArchives = { }
153 existing_archives = set(cls.__NamespaceArchives.values())
154 archive_set = set(required_archives)
155
156 # Get archives for all required files
157 if required_archive_files is not None:
158 for afn in required_archive_files:
159 required_archives.append(cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules))
160
161 # Ensure we have an archive path. If not, don't do anything.
162 if archive_path is None:
163 archive_path = GetArchivePath()
164 if archive_path is not None:
165
166 # Get archive instances for everything in the archive path
167 candidate_files = pyxb.utils.utility.GetMatchingFiles(archive_path, cls.__ArchivePattern_re,
168 default_path_wildcard='+', default_path=GetArchivePath(),
169 prefix_pattern='&', prefix_substituend=DefaultArchivePrefix)
170 for afn in candidate_files:
171 try:
172 nsa = cls.__GetArchiveInstance(afn, stage=cls._STAGE_readModules)
173 archive_set.add(nsa)
174 except pickle.UnpicklingError as e:
175 _log.exception('Cannot unpickle archive %s', afn)
176 except pyxb.NamespaceArchiveError as e:
177 _log.exception('Cannot process archive %s', afn)
178
179 # Do this for two reasons: first, to get an iterable that won't
180 # cause problems when we remove unresolvable archives from
181 # archive_set; and second to aid with forced dependency inversion
182 # testing
183 ordered_archives = sorted(list(archive_set), lambda _a,_b: cmp(_a.archivePath(), _b.archivePath()))
184 ordered_archives.reverse()
185
186 # Create a graph that identifies dependencies between the archives
187 archive_map = { }
188 for a in archive_set:
189 archive_map[a.generationUID()] = a
190 archive_graph = pyxb.utils.utility.Graph()
191 for a in ordered_archives:
192 prereqs = a._unsatisfiedModulePrerequisites()
193 if 0 < len(prereqs):
194 for p in prereqs:
195 if builtin.BuiltInObjectUID == p:
196 continue
197 da = archive_map.get(p)
198 if da is None:
199 _log.warning('%s depends on unavailable archive %s', a, p)
200 archive_set.remove(a)
201 else:
202 archive_graph.addEdge(a, da)
203 else:
204 archive_graph.addRoot(a)
205
206 # Verify that there are no dependency loops.
207 archive_scc = archive_graph.sccOrder()
208 for scc in archive_scc:
209 if 1 < len(scc):
210 raise pyxb.LogicError("Cycle in archive dependencies. How'd you do that?\n " + "\n ".join([ _a.archivePath() for _a in scc ]))
211 archive = scc[0]
212 if not (archive in archive_set):
213 archive.discard()
214 existing_archives.remove(archive)
215 continue
216 #archive._readToStage(cls._STAGE_COMPLETE)
217
218 # Discard any archives that we used to know about but now aren't
219 # supposed to. @todo make this friendlier in the case of archives
220 # we've already incorporated.
221 for archive in existing_archives.difference(archive_set):
222 _log.info('Discarding excluded archive %s', archive)
223 archive.discard()
224
225 return required_archives
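# Usage sketch (the archive file name is hypothetical): make one specific
# archive available in addition to whatever is found on the archive path.
#
#   import pyxb.namespace.archive as nsarchive
#   archives = nsarchive.NamespaceArchive.PreLoadArchives(
#       required_archive_files=['/opt/myproject/archives/myschema.wxs'])
#   # archives holds the NamespaceArchive instances for the required files,
#   # in the order they were requested.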
226
227 def archivePath (self):
228 return self.__archivePath
230 __archivePath = None
231
232 def generationUID (self):
233 """The unique identifier for the generation that produced this archive."""
234 return self.__generationUID
235 __generationUID = None
236
237 def isLoadable (self):
238 """Return C{True} iff it is permissible to load the archive.
239 Archives created for output cannot be loaded."""
240 return self.__isLoadable
241 __isLoadable = None
242
243 def __locateModuleRecords (self):
244 self.__moduleRecords = set()
245 namespaces = set()
246 for ns in pyxb.namespace.utility.AvailableNamespaces():
247 # @todo allow these; right now it's usually the XML
248 # namespace and we're not prepared to reconcile
249 # redefinitions of those components.
250 if ns.isUndeclaredNamespace():
251 continue
252 mr = ns.lookupModuleRecordByUID(self.generationUID())
253 if mr is not None:
254 namespaces.add(ns)
255 mr.prepareForArchive(self)
256 self.__moduleRecords.add(mr)
257 self.__namespaces.update(namespaces)
258 def moduleRecords (self):
259 """Return the set of L{module records <ModuleRecord>} stored in this
260 archive.
261
262 Each module record represents the data stored for a single namespace by the generation that produced this archive."""
263 return self.__moduleRecords
264 __moduleRecords = None
265
266 @classmethod
268 """Return the L{NamespaceArchive} instance that can be found at the
269 given path."""
270 return cls.__GetArchiveInstance(archive_file)
271
272 # States in the finite automaton that is used to read archive contents.
273 _STAGE_UNOPENED = 0 # Haven't even checked for existence
274 _STAGE_uid = 1 # Verified archive exists, obtained generation UID from it
275 _STAGE_readModules = 2 # Read module records from archive, which includes UID dependences
276 _STAGE_validateModules = 3 # Verified pre-requisites for module loading
277 _STAGE_readComponents = 4 # Extracted components from archive and integrated into namespaces
278 _STAGE_COMPLETE = _STAGE_readComponents
279
281 return self.__stage
282 __stage = None
283
285 """Create a new namespace archive.
286
287 If C{namespaces} is given, this is an output archive.
288
289 If C{namespaces} is absent, this is an input archive.
290
291 @raise IOError: error attempting to read the archive file
292 @raise pickle.UnpicklingError: something is wrong with the format of the library
293 """
294 self.__namespaces = set()
295 if generation_uid is not None:
296 if archive_path:
297 raise pyxb.LogicError('NamespaceArchive: cannot define both namespaces and archive_path')
298 self.__generationUID = generation_uid
299 self.__locateModuleRecords()
300 elif archive_path is not None:
301 if generation_uid is not None:
302 raise pyxb.LogicError('NamespaceArchive: cannot provide generation_uid with archive_path')
303 self.__archivePath = archive_path
304 self.__stage = self._STAGE_UNOPENED
305 self.__isLoadable = loadable
306 if self.__isLoadable:
307 if stage is None:
308 stage = self._STAGE_readModules
309 self._readToStage(stage)
310 else:
311 pass
312
314 """Add the given namespace to the set that is to be stored in this archive."""
315 if namespace.isAbsentNamespace():
316 raise pyxb.NamespaceArchiveError('Cannot archive absent namespace')
317 self.__namespaces.add(namespace)
318
319 def update (self, namespace_set):
320 """Add the given namespaces to the set that is to be stored in this archive."""
321 [ self.add(_ns) for _ns in namespace_set ]
322
326 __namespaces = None
327
328 def __createPickler (self, output):
329 # @todo: support StringIO instances?
330 if not isinstance(output, file):
331 output = open(output, 'wb')
332 pickler = pickle.Pickler(output, -1)
333
334 # The format of the archive
335 pickler.dump(NamespaceArchive.__PickleFormat)
336
337 # The UID for the set
338 assert self.generationUID() is not None
339 pickler.dump(self.generationUID())
340
341 return pickler
342
343 def __createUnpickler (self):
344 unpickler = pickle.Unpickler(open(self.__archivePath, 'rb'))
345
346 fmt = unpickler.load()
347 if self.__PickleFormat != fmt:
348 raise pyxb.NamespaceArchiveError('Archive format is %s, require %s' % (fmt, self.__PickleFormat))
349
350 self.__generationUID = unpickler.load()
351
352 return unpickler
353
354 def __readModules (self, unpickler):
355 mrs = unpickler.load()
356 assert isinstance(mrs, set), 'Expected set got %s from %s' % (type(mrs), self.archivePath())
357 if self.__moduleRecords is None:
358 for mr in mrs.copy():
359 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
360 if mr2 is not None:
361 mr2._setFromOther(mr, self)
362 mrs.remove(mr)
363 self.__moduleRecords = set()
364 assert 0 == len(self.__namespaces)
365 for mr in mrs:
366 mr._setArchive(self)
367 ns = mr.namespace()
368 ns.addModuleRecord(mr)
369 self.__namespaces.add(ns)
370 self.__moduleRecords.add(mr)
371 else:
372 # Verify the archive still has what was in it when we created this.
373 for mr in mrs:
374 mr2 = mr.namespace().lookupModuleRecordByUID(mr.generationUID())
375 if not (mr2 in self.__moduleRecords):
376 raise pyxb.NamespaceArchiveError('Lost module record %s %s from %s' % (mr.namespace(), mr.generationUID(), self.archivePath()))
377
378 def _unsatisfiedModulePrerequisites (self):
379 prereq_uids = set()
380 for mr in self.__moduleRecords:
381 prereq_uids.update(mr.dependsOnExternal())
382 return prereq_uids
383
384 def __validatePrerequisites (self, stage):
385 from pyxb.namespace import builtin
386 prereq_uids = self._unsatisfiedModulePrerequisites()
387 for uid in prereq_uids:
388 if builtin.BuiltInObjectUID == uid:
389 continue
390 depends_on = self.__NamespaceArchives.get(uid)
391 if depends_on is None:
392 raise pyxb.NamespaceArchiveError('%s: archive depends on unavailable archive %s' % (self.archivePath(), uid))
393 depends_on._readToStage(stage)
394
395 def __validateModules (self):
396 self.__validatePrerequisites(self._STAGE_validateModules)
397 for mr in self.__moduleRecords:
398 ns = mr.namespace()
399 for base_uid in mr.dependsOnExternal():
400 xmr = ns.lookupModuleRecordByUID(base_uid)
401 if xmr is None:
402 raise pyxb.NamespaceArchiveError('Module %s depends on external module %s, not available in archive path' % (mr.generationUID(), base_uid))
403 if not xmr.isIncorporated():
404 _log.info('Need to incorporate data from %s', xmr)
405 else:
406 _log.info('Have required base data %s', xmr)
407
408 for origin in mr.origins():
409 for (cat, names) in origin.categoryMembers().iteritems():
410 if not (cat in ns.categories()):
411 continue
412 cross_objects = names.intersection(ns.categoryMap(cat).keys())
413 if 0 < len(cross_objects):
414 raise pyxb.NamespaceArchiveError('Archive %s namespace %s module %s origin %s archive/active conflict on category %s: %s' % (self.__archivePath, ns, mr, origin, cat, " ".join(cross_objects)))
415 _log.info('%s no conflicts on %d names', cat, len(names))
416
417 def __readComponentSet (self, unpickler):
418 self.__validatePrerequisites(self._STAGE_readComponents)
419 for n in range(len(self.__moduleRecords)):
420 ns = unpickler.load()
421 mr = ns.lookupModuleRecordByUID(self.generationUID())
422 assert mr in self.__moduleRecords
423 assert not mr.isIncorporated()
424 objects = unpickler.load()
425 mr._loadCategoryObjects(objects)
426
427 __unpickler = None
428 def _readToStage (self, stage):
429 if self.__stage is None:
430 raise pyxb.NamespaceArchiveError('Attempt to read from invalid archive %s' % (self,))
431 try:
432 while self.__stage < stage:
433 if self.__stage < self._STAGE_uid:
434 self.__unpickler = self.__createUnpickler()
435 self.__stage = self._STAGE_uid
436 continue
437 if self.__stage < self._STAGE_readModules:
438 assert self.__unpickler is not None
439 self.__readModules(self.__unpickler)
440 self.__stage = self._STAGE_readModules
441 continue
442 if self.__stage < self._STAGE_validateModules:
443 self.__validateModules()
444 self.__stage = self._STAGE_validateModules
445 continue
446 if self.__stage < self._STAGE_readComponents:
447 assert self.__unpickler is not None
448 self.__stage = self._STAGE_readComponents
449 self.__readComponentSet(self.__unpickler)
450 self.__unpickler = None
451 continue
452 raise pyxb.LogicError('Too many stages (at %s, want %s)' % (self.__stage, stage))
453 except:
454 self.__stage = None
455 self.__unpickler = None
456 raise
457
458 def readNamespaces (self):
459 """Read all the components from this archive, integrating them into
460 their respective namespaces."""
461 self._readToStage(self._STAGE_COMPLETE)
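# Usage sketch (file name hypothetical; constructing the archive directly like
# this is an assumption rather than documented usage): read one archive
# completely, integrating its namespaces.
#
#   nsa = NamespaceArchive(archive_path='/opt/myproject/archives/myschema.wxs')
#   nsa.readNamespaces()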
462
463 def writeNamespaces (self, output):
464 """Store the namespaces into the archive.
465
466 @param output: An instance substitutable for a writable file, or the
467 name of a file to write to.
468 """
469 import sys
470
471 assert NamespaceArchive.__PicklingArchive is None
472 NamespaceArchive.__PicklingArchive = self
473 assert self.__moduleRecords is not None
474
475 # Recalculate the record/object associations: we didn't assign
476 # anonymous names to the indeterminate scope objects because they
477 # weren't needed for bindings, but they are needed in the archive.
478 for mr in self.__moduleRecords:
479 mr.namespace()._associateOrigins(mr)
480
481 try:
482 # See http://bugs.python.org/issue3338
483 recursion_limit = sys.getrecursionlimit()
484 sys.setrecursionlimit(10 * recursion_limit)
485
486 pickler = self.__createPickler(output)
487
488 assert isinstance(self.__moduleRecords, set)
489 pickler.dump(self.__moduleRecords)
490
491 for mr in self.__moduleRecords:
492 pickler.dump(mr.namespace())
493 pickler.dump(mr.categoryObjects())
494 finally:
495 sys.setrecursionlimit(recursion_limit)
496 NamespaceArchive.__PicklingArchive = None
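# Usage sketch (names hypothetical): write an output archive for a generation.
# Assumes my_generation_uid is the pyxb.utils.utility.UniqueIdentifier of the
# generation whose namespaces should be stored.
#
#   nsa = NamespaceArchive(generation_uid=my_generation_uid)
#   nsa.writeNamespaces('/opt/myproject/archives/myschema.wxs')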
497
498 def __str__ (self):
499 archive_path = self.__archivePath
500 if archive_path is None:
501 archive_path = '??'
502 return 'NSArchive@%s' % (archive_path,)
503
504 class _ArchivableObject_mixin (pyxb.cscRoot):
505 """Mix-in to any object that can be stored in a namespace within an archive."""
506
507 # Need to set this per category item
508 __objectOrigin = None
509 def _objectOrigin (self):
510 return self.__objectOrigin
511 def _setObjectOrigin (self, object_origin, override=False):
512 if (self.__objectOrigin is not None) and (not override):
513 if self.__objectOrigin != object_origin:
514 raise pyxb.LogicError('Inconsistent origins for object %s: %s %s' % (self, self.__objectOrigin, object_origin))
515 else:
516 self.__objectOrigin = object_origin
517
518 def _prepareForArchive (self, module_record):
519 #assert self.__objectOrigin is not None
520 if self._objectOrigin() is not None:
521 return getattr(super(_ArchivableObject_mixin, self), '_prepareForArchive_csc', lambda *_args,**_kw: self)(self._objectOrigin().moduleRecord())
522 assert not isinstance(self, pyxb.xmlschema.structures._NamedComponent_mixin)
523
524 def _updateFromOther_csc (self, other):
525 return getattr(super(_ArchivableObject_mixin, self), '_updateFromOther_csc', lambda *_args,**_kw: self)(other)
526
527 def _updateFromOther (self, other):
528 """Update this instance with additional information provided by the other instance.
529
530 This is used, for example, when a built-in type is already registered
531 in the namespace, but we've processed the corresponding schema and
532 have obtained more details."""
533 assert self != other
534 return self._updateFromOther_csc(other)
535
537 from pyxb.namespace import builtin
538 assert self._objectOrigin()
539 return builtin.BuiltInObjectUID == self._objectOrigin().generationUID()
540
541 class _NamespaceArchivable_mixin (pyxb.cscRoot):
542 """Encapsulate the operations and data relevant to archiving namespaces.
543
544 This class mixes-in to L{pyxb.namespace.Namespace}"""
545
546 def _reset (self):
547 """CSC extension to reset fields of a Namespace.
548
549 This one handles category-related data."""
550 getattr(super(_NamespaceArchivable_mixin, self), '_reset', lambda *args, **kw: None)()
551 self.__loadedFromArchive = None
552 self.__wroteToArchive = None
553 self.__active = False
554 self.__moduleRecordMap = {}
555
556 def _loadedFromArchive (self):
557 return self.__loadedFromArchive
558
559 __wroteToArchive = None
560 __loadedFromArchive = None
561
562 def isActive (self, empty_inactive=False):
563 if self.__isActive and empty_inactive:
564 for (ct, cm) in self._categoryMap().items():
565 if 0 < len(cm):
566 return True
567 return False
568 return self.__isActive
569
570 def _activate (self):
571 self.__isActive = True
572 __isActive = None
573
576
582
583 def _removeArchive (self, archive):
584 # Yes, I do want this to raise KeyError if the archive is not present
585 mr = self.__moduleRecordMap[archive.generationUID()]
586 assert not mr.isIncorporated(), 'Removing archive %s after incorporation' % (archive.archivePath(),)
587 del self.__moduleRecordMap[archive.generationUID()]
588
589 def isLoadable (self):
590 """Return C{True} iff the component model for this namespace can be
591 loaded from a namespace archive."""
592 for mr in self.moduleRecords():
593 if mr.isLoadable():
594 return True
595 return False
596
597 def loadableFrom (self):
598 """Return the list of archives from which components for this
599 namespace can be loaded."""
600 rv = []
601 for mr in self.moduleRecords():
602 if mr.isLoadable():
603 rv.append(mr.archive())
604 return rv
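# Illustrative check (some_namespace is hypothetical): list the archives that
# could supply this namespace's component model.
#
#   for mr in some_namespace.moduleRecords():
#       if mr.isLoadable():
#           print 'loadable from %s' % (mr.archive().archivePath(),)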
605
606 def moduleRecords (self):
607 return self.__moduleRecordMap.values()
608 __moduleRecordMap = None
609
610 def addModuleRecord (self, module_record):
611 assert isinstance(module_record, ModuleRecord)
612 # This assertion will fail if the binding is loaded before its archive
613 # is scanned.
614 assert not (module_record.generationUID() in self.__moduleRecordMap)
615 self.__moduleRecordMap[module_record.generationUID()] = module_record
616 return module_record
617 def lookupModuleRecordByUID (self, generation_uid, create_if_missing=False, *args, **kw):
618 rv = self.__moduleRecordMap.get(generation_uid)
619 if (rv is None) and create_if_missing:
620 rv = self.addModuleRecord(ModuleRecord(self, generation_uid, *args, **kw))
621 return rv
622
623 def _getState_csc (self, kw):
624 #assert not self.__isActive, 'ERROR: State set for active namespace %s' % (self,)
625 return getattr(super(_NamespaceArchivable_mixin, self), '_getState_csc', lambda _kw: _kw)(kw)
626
627 def markNotLoadable (self):
628 """Prevent loading this namespace from an archive.
629
630 This marks all archives in which the namespace appears, whether
631 publicly or privately, as not loadable."""
632 if self._loadedFromArchive():
633 raise pyxb.NamespaceError(self, 'cannot mark not loadable when already loaded')
634 for mr in self.moduleRecords():
635 mr._setIsLoadable(False)
636
637 class ModuleRecord (pyxb.utils.utility.PrivateTransient_mixin):
638 __PrivateTransient = set()
639
640 def namespace (self):
641 return self.__namespace
642 __namespace = None
643
644 def archive (self):
645 return self.__archive
646 def _setArchive (self, archive):
647 self.__archive = archive
648 return self
649 __archive = None
650 __PrivateTransient.add('archive')
651
652 def isPublic (self):
653 return self.__isPublic
657 __isPublic = None
658
659 def isIncorporated (self):
660 return self.__isIncorporated
661 def markIncorporated (self):
662 assert self.__isLoadable
663 self.__isIncorporated = True
664 self.__isLoadable = False
665 return self
666 __isIncorporated = None
667 __PrivateTransient.add('isIncorporated')
668
669 def isLoadable (self):
670 return self.__isLoadable
671 def _setIsLoadable (self, loadable):
672 self.__isLoadable = loadable
673 return self
674 __isLoadable = None
675
676 def generationUID (self):
677 return self.__generationUID
678 __generationUID = None
679
680 def origins (self):
681 return self.__originMap.values()
682 def addOrigin (self, origin):
683 assert isinstance(origin, _ObjectOrigin)
684 assert not (origin.signature() in self.__originMap)
685 self.__originMap[origin.signature()] = origin
686 return origin
689 def _setOrigins (self, origins):
690 if self.__originMap is None:
691 self.__originMap = {}
692 else:
693 self.__originMap.clear()
694 [ self.addOrigin(_o) for _o in origins ]
695 return self
696 __originMap = None
697
703
704 def modulePath (self):
705 return self.__modulePath
706 def setModulePath (self, module_path):
707 assert (module_path is None) or isinstance(module_path, basestring)
708 self.__modulePath = module_path
709 return self
710 __modulePath = None
711
712 def module (self):
713 return self.__module
714 def _setModule (self, module):
715 self.__module = module
716 # This is a nice idea, but screws up the unit tests that already have
717 # ns1 and the like logged as expected prefixes. Only causes a problem
718 # when the tests are run individually; dunno why.
719 #ns = self.namespace()
720 #if (ns.prefix() is None) and (module is not None):
721 # try:
722 # ns.setPrefix(os.path.basename(os.path.normpath(module.__file__)).split('.')[0])
723 # except AttributeError:
724 # pass
725 return self
726 __module = None
727 __PrivateTransient.add('module')
728
729 def referencedNamespaces (self):
730 return self.__referencedNamespaces
737 __referencedNamespaces = None
738
739 __constructedLocally = False
740 __PrivateTransient.add('constructedLocally')
741
742 def __init__ (self, namespace, generation_uid, **kw):
743 from pyxb.namespace import builtin
744
745 super(ModuleRecord, self).__init__()
746 self.__namespace = namespace
747 assert (generation_uid != builtin.BuiltInObjectUID) or namespace.isBuiltinNamespace()
748 self.__isPublic = kw.get('is_public', False)
749 self.__isIncorporated = kw.get('is_incorporated', False)
750 self.__isLoadable = kw.get('is_loadable', True)
751 assert isinstance(generation_uid, pyxb.utils.utility.UniqueIdentifier)
752 self.__generationUID = generation_uid
753 self.__modulePath = kw.get('module_path')
754 self.__module = kw.get('module')
755 self.__originMap = {}
756 self.__referencedNamespaces = set()
757 self.__categoryObjects = { }
758 self.__constructedLocally = True
759 self.__dependsOnExternal = set()
760
761 def _setFromOther (self, other, archive):
762 if (not self.__constructedLocally) or other.__constructedLocally:
763 raise pyxb.ImplementationError('Module record update requires local to be updated from archive')
764 assert self.__generationUID == other.__generationUID
765 assert self.__archive is None
766 self.__isPublic = other.__isPublic
767 assert not self.__isIncorporated
768 self.__isLoadable = other.__isLoadable
769 self.__modulePath = other.__modulePath
770 # self.__module already set correctly
771 self.__originMap.update(other.__originMap)
772 self.__referencedNamespaces.update(other.__referencedNamespaces)
773 if not (other.__categoryObjects is None):
774 self.__categoryObjects.update(other.__categoryObjects)
775 self.__dependsOnExternal.update(other.__dependsOnExternal)
776 self._setArchive(archive)
777
778 def categoryObjects (self):
779 return self.__categoryObjects
780 def resetCategoryObjects (self):
781 self.__categoryObjects.clear()
782 for origin in self.origins():
783 origin.resetCategoryMembers()
787 def _loadCategoryObjects (self, category_objects):
788 assert self.__categoryObjects is None
789 assert not self.__constructedLocally
790 ns = self.namespace()
791 ns.configureCategories(category_objects.keys())
792 for (cat, obj_map) in category_objects.iteritems():
793 current_map = ns.categoryMap(cat)
794 for (local_name, component) in obj_map.iteritems():
795 existing_component = current_map.get(local_name)
796 if existing_component is None:
797 current_map[local_name] = component
798 elif existing_component._allowUpdateFromOther(component):
799 existing_component._updateFromOther(component)
800 else:
801 raise pyxb.NamespaceError(self, 'Load attempted to override %s %s in %s' % (cat, local_name, self.namespace()))
802 self.markIncorporated()
803 __categoryObjects = None
804 __PrivateTransient.add('categoryObjects')
805
806 def dependsOnExternal (self):
807 return self.__dependsOnExternal
808 __dependsOnExternal = None
809
810 def prepareForArchive (self, archive):
811 assert self.archive() is None
812 self._setArchive(archive)
813 ns = self.namespace()
814 self.__dependsOnExternal.clear()
815 for mr in ns.moduleRecords():
816 if mr != self:
817 _log.info('This gen depends on %s', mr)
818 self.__dependsOnExternal.add(mr.generationUID())
819 for obj in ns._namedObjects().union(ns.components()):
820 if isinstance(obj, _ArchivableObject_mixin):
821 if obj._objectOrigin():
822 obj._prepareForArchive(self)
823
825 self.namespace()._transferReferencedNamespaces(self)
826 self.namespace()._associateOrigins(self)
827
830
831 class _ObjectOrigin (pyxb.utils.utility.PrivateTransient_mixin, pyxb.cscRoot):
832 """Marker class for objects that can serve as an origin for an object in a
833 namespace."""
834 __PrivateTransient = set()
835
836 def signature (self):
837 return self.__signature
838 __signature = None
839
840 def moduleRecord (self):
841 return self.__moduleRecord
842 __moduleRecord = None
843
846
849
850 def __init__ (self, namespace, generation_uid, **kw):
851 self.__signature = kw.pop('signature', None)
852 super(_ObjectOrigin, self).__init__(**kw)
853 self.__moduleRecord = namespace.lookupModuleRecordByUID(generation_uid, create_if_missing=True, **kw)
854 self.__moduleRecord.addOrigin(self)
855 self.__categoryMembers = { }
856 self.__categoryObjectMap = { }
857
858 def resetCategoryMembers (self):
859 self.__categoryMembers.clear()
860 self.__categoryObjectMap.clear()
861 self.__originatedObjects = None
862 def addCategoryMember (self, category, name, obj):
863 self.__categoryMembers.setdefault(category, set()).add(name)
864 self.__categoryObjectMap.setdefault(category, {})[name] = obj
865 self.__moduleRecord._addCategoryObject(category, name, obj)
866 def categoryMembers (self):
867 return self.__categoryMembers
868 def originatedObjects (self):
869 if self.__originatedObjects is None:
870 components = set()
871 [ components.update(_v.values()) for _v in self.__categoryObjectMap.itervalues() ]
872 self.__originatedObjects = frozenset(components)
873 return self.__originatedObjects
874
875 # The set of category names associated with objects. Don't throw this
876 # away and use categoryObjectMap.keys() instead: that's transient, and we
877 # need this to have a value when read from an archive.
878 __categoryMembers = None
879
880 # Map from category name to a map from an object name to the object
881 __categoryObjectMap = None
882 __PrivateTransient.add('categoryObjectMap')
883
884 # The set of objects that originated at this origin
885 __originatedObjects = None
886 __PrivateTransient.add('originatedObjects')
887
888 class _SchemaOrigin (_ObjectOrigin):
889 """Holds the data regarding components derived from a single schema.
890
891 Coupled to a particular namespace through the
892 L{_NamespaceComponentAssociation_mixin}.
893 """
894
895 __PrivateTransient = set()
896
897 def __setDefaultKW (self, kw):
898 schema = kw.get('schema')
899 if schema is not None:
900 assert not ('location' in kw)
901 kw['location'] = schema.location()
902 assert not ('signature' in kw)
903 kw['signature'] = schema.signature()
904 assert not ('generation_uid' in kw)
905 kw['generation_uid'] = schema.generationUID()
906 assert not ('namespace' in kw)
907 kw['namespace'] = schema.targetNamespace()
908 assert not ('version' in kw)
909 kw['version'] = schema.schemaAttribute('version')
910
911 def match (self, **kw):
912 """Determine whether this record matches the parameters.
913
914 @keyword schema: a L{pyxb.xmlschema.structures.Schema} instance from
915 which the other parameters are obtained.
916 @keyword location: a schema location (URI)
917 @keyword signature: a schema signature
918 @return: C{True} iff I{either} C{location} or C{signature} matches."""
919 self.__setDefaultKW(kw)
920 location = kw.get('location')
921 if (location is not None) and (self.location() == location):
922 return True
923 signature = kw.get('signature')
924 if (signature is not None) and (self.signature() == signature):
925 return True
926 return False
927
928 def location (self):
929 return self.__location
930 __location = None
931
932 def schema (self):
933 return self.__schema
934 __schema = None
935 __PrivateTransient.add('schema')
936
937 def version (self):
938 return self.__version
939 __version = None
940
941 def __init__ (self, **kw):
942 self.__setDefaultKW(kw)
943 self.__schema = kw.pop('schema', None)
944 self.__location = kw.pop('location', None)
945 self.__version = kw.pop('version', None)
946 super(_SchemaOrigin, self).__init__(kw.pop('namespace'), kw.pop('generation_uid'), **kw)
947
954
955 class NamespaceDependencies (object):
956
957 def rootNamespaces (self):
958 return self.__rootNamespaces
959 __rootNamespaces = None
960
961 def namespaceGraph (self, reset=False):
962 if reset or (self.__namespaceGraph is None):
963 self.__namespaceGraph = pyxb.utils.utility.Graph()
964 map(self.__namespaceGraph.addRoot, self.rootNamespaces())
965
966 # Make sure all referenced namespaces have valid components
967 need_check = self.__rootNamespaces.copy()
968 done_check = set()
969 while 0 < len(need_check):
970 ns = need_check.pop()
971 ns.validateComponentModel()
972 self.__namespaceGraph.addNode(ns)
973 for rns in ns.referencedNamespaces().union(ns.importedNamespaces()):
974 self.__namespaceGraph.addEdge(ns, rns)
975 if not rns in done_check:
976 need_check.add(rns)
977 if not ns.hasSchemaComponents():
978 _log.warning('Referenced %s has no schema components', ns.uri())
979 done_check.add(ns)
980 assert done_check == self.__namespaceGraph.nodes()
981
982 return self.__namespaceGraph
983 __namespaceGraph = None
984
987
988 def siblingsFromGraph (self, reset=False):
989 siblings = set()
990 ns_graph = self.namespaceGraph(reset)
991 for ns in self.__rootNamespaces:
992 ns_siblings = ns_graph.sccMap().get(ns)
993 if ns_siblings is not None:
994 siblings.update(ns_siblings)
995 else:
996 siblings.add(ns)
997 return siblings
998
999 def siblingNamespaces (self):
1000 if self.__siblingNamespaces is None:
1001 self.__siblingNamespaces = self.siblingsFromGraph()
1002 return self.__siblingNamespaces
1003
1004 def setSiblingNamespaces (self, sibling_namespaces):
1005 self.__siblingNamespaces = sibling_namespaces
1006
1007 __siblingNamespaces = None
1008
1011
1012 def componentGraph (self, reset=False):
1013 if reset or (self.__componentGraph is None):
1014 self.__componentGraph = pyxb.utils.utility.Graph()
1015 all_components = set()
1016 for ns in self.siblingNamespaces():
1017 [ all_components.add(_c) for _c in ns.components() if _c.hasBinding() ]
1018
1019 need_visit = all_components.copy()
1020 while 0 < len(need_visit):
1021 c = need_visit.pop()
1022 self.__componentGraph.addNode(c)
1023 for cd in c.bindingRequires(include_lax=True):
1024 if cd in all_components:
1025 self.__componentGraph.addEdge(c, cd)
1026 return self.__componentGraph
1027 __componentGraph = None
1028
1031
1032 def __init__ (self, **kw):
1033 namespace_set = set(kw.get('namespace_set', []))
1034 namespace = kw.get('namespace')
1035 if namespace is not None:
1036 namespace_set.add(namespace)
1037 if 0 == len(namespace_set):
1038 raise pyxb.LogicError('NamespaceDependencies requires at least one root namespace')
1039 self.__rootNamespaces = namespace_set
1040
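# Usage sketch (some_namespace is hypothetical): compute the namespace graph
# and sibling set rooted at a single namespace.
#
#   deps = NamespaceDependencies(namespace=some_namespace)
#   ns_graph = deps.namespaceGraph()
#   siblings = deps.siblingNamespaces()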
1041
1042 ## Local Variables:
1043 ## fill-column:78
1044 ## End:
1045