
API Reference

Manage configuration with pydantic.

config

The base configuration model class BaseConfig.

ModelConfig

Bases: SettingsConfigDict

Meta-configuration for configzen models.

Source code in configzen/config.py
class ModelConfig(SettingsConfigDict, total=False):
    """Meta-configuration for configzen models."""

    config_source: str | ConfigSource[Any, Any]
    rebuild_on_load: bool
    processor_factory: Callable[..., ConfigProcessor]
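
For illustration, a subclass can set these meta-options directly on model_config. A minimal sketch, assuming BaseConfig and ModelConfig are imported from configzen.config (the module documented here); the file name "app.yml" is a placeholder:

from configzen.config import BaseConfig, ModelConfig

class AppConfig(BaseConfig):
    # Meta-options: the default source to load from and whether to rebuild
    # the model (resolve forward references) on every load.
    model_config = ModelConfig(
        config_source="app.yml",  # placeholder file name
        rebuild_on_load=True,
    )

    host: str = "localhost"
    port: int = 8080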

BaseConfig

Bases: BaseSettings

Base class for all configuration models.

Source code in configzen/config.py
class BaseConfig(BaseSettings, metaclass=BaseConfigMetaclass):
    """Base class for all configuration models."""

    _config_source: ConfigSource[Any, Any] = PrivateAttr()
    _config_data: Data = PrivateAttr(default_factory=dict)
    _config_processor: ConfigProcessor = PrivateAttr()
    _config_root: BaseConfig | None = PrivateAttr(default=None)

    def __init__(self, **data: Any) -> None:
        try:
            owner = owner_lookup.get()
        except LookupError:
            owner = None
            if processing.get():
                owner_lookup.set(self)
        super().__init__(**data)
        self._config_root = owner

    # Mark the configzen's constructor as a non-custom constructor.
    __init__.__pydantic_base_init__ = True  # type: ignore[attr-defined]

    @property
    def config_root(self) -> BaseConfig:
        """Return the root configuration that was used to load the entire data."""
        return self._config_root or self

    @property
    def config_source(self) -> ConfigSource[Any, Any] | None:
        """Return the configuration source that was used to load the configuration."""
        if self._config_root is None:
            # Since _config_source is a private attribute
            # without a default value, we need to use getattr
            # to avoid an AttributeError in case this attribute
            # was not set (which may happen when the configuration
            # is instantiated manually).
            return getattr(self, "_config_source", None)
        return self._config_root.config_source

    @property
    def config_data(self) -> Data:
        """Return the configuration that was loaded from the configuration source."""
        if self._config_root is None:
            return self._config_data
        return self._config_root.config_data

    @property
    def config_processor(self) -> ConfigProcessor:
        """
        Current configuration processor.

        Processor stores the initial data used when loading the configuration,
        resolves macros etc.
        """
        if self._config_root is None:
            if not hasattr(self, "_config_processor"):
                return FileSystemAwareConfigProcessor(self.config_dump())
            return self._config_processor
        return self._config_root.config_processor

    def config_find_routes(
        self,
        subconfig: BaseConfig,
    ) -> set[Route]:
        """
        Locate all occurrences of a subconfiguration in the current configuration.

        Return a set of routes to the located subconfiguration.
        """
        if not isinstance(subconfig, BaseConfig):
            msg = f"Expected a BaseConfig subclass instance, got {type(subconfig)!r}"
            raise TypeError(msg)
        return set(
            _locate_in_mapping(vars(self), subconfig, attribute_access=True),
        )

    def config_find_route(self, subconfig: BaseConfig) -> Route:
        """Locate exactly one (closest) route to the given subconfiguration."""
        all_routes = self.config_find_routes(subconfig)
        if not all_routes:
            msg = f"Unable to locate subconfiguration {subconfig}"
            raise LookupError(msg)
        return next(iter(all_routes))

    @classmethod
    def _validate_config_source(
        cls,
        source: object | None = None,
    ) -> ConfigSource[Any, Any]:
        if source is None:
            source = cls.model_config.get("config_source")
        if source is None:
            msg = f"No config source provided when loading {cls.__name__}"
            raise ValueError(msg)
        if not isinstance(source, ConfigSource):
            source = get_config_source(source)
            if source is None:
                msg = (
                    f"Could not create a config source from {source!r} "
                    f"of type {type(source)!r}"
                )
                raise ValueError(msg)
        return source

    @classmethod
    def _validate_processor_factory(
        cls,
        processor_factory: Callable[..., ConfigProcessor] | None = None,
    ) -> Callable[..., ConfigProcessor]:
        return (
            processor_factory
            or cast(
                "Callable[..., ConfigProcessor] | None",
                cls.model_config.get("config_processor_factory"),
            )
            or FileSystemAwareConfigProcessor
        )

    @classmethod
    def _try_rebuild_model(cls) -> None:
        # Possible scenarios:
        # (sync) Frame 1: <class>.config_load()
        # (sync) Frame 2: isolated_context_function.<locals>.copy()
        # (sync) Frame 3: run_isolated()
        # (sync) Frame 4: <class>.config_load()
        # (sync) Frame 5: <class>.model_rebuild()
        #
        # (async) Frame 1: <class>.config_load_async()
        # (async) Frame 2: isolated_context_function.<locals>.copy()
        # (async) Frame 3: run_isolated()
        # (async) Frame 4: <class>.config_load()
        # (async) Frame 5: <class>.model_rebuild()
        if cls.model_config["rebuild_on_load"]:
            with suppress(Exception):
                cls.model_rebuild(_parent_namespace_depth=5)

    @classmethod
    @isolated_context_function
    def config_load(
        cls,
        source: object | None = None,
        *,
        processor_factory: Callable[..., ConfigProcessor] | None = None,
    ) -> Self:
        """
        Load this configuration from a given source.

        Parameters
        ----------
        source
            Where to load the configuration from. The argument passed is forwarded
            to `configzen.sources.get_config_source()` which will resolve
            the intended configuration source: for example, "abc.ini" will be resolved
            to a TOML text file source. Keep in mind, however, that for binary formats
            such as non-XML Plist you must specify its format type to binary, so in
            that case just create `BinaryFileConfigSource("plist_file.plist")`.
        context
            The context to use during model validation.
            See also [`model_validate`][pydantic.BaseModel.model_validate].
        processor_factory
            The state factory to use to parse the newly loaded configuration data.

        Returns
        -------
        self

        """
        cls._try_rebuild_model()

        # Validate the source we load our configuration from.
        config_source = cls._validate_config_source(source)

        # Validate the processor we use to parse the loaded configuration data.
        make_processor = cls._validate_processor_factory(processor_factory)

        # Load the configuration data from the sanitized source.
        # Keep in mind the loaded data object keeps all the additional
        # metadata that we want to keep.
        # Then we pass it to the processor factory to process the configuration data
        # into a bare dictionary that does not hold anything else
        # than the configuration data, by using `processor.get_processed_data()`.
        processor = make_processor(config_source.load())

        # ruff: noqa: FBT003
        try:
            processing.set(ProcessingContext(cls, processor, trace=[config_source]))

            # Processing will execute any commands that are present
            # in the configuration data and return the final configuration
            # data that we will use to construct an instance of the configuration model.
            # During this process, we lose all the additional metadata that we
            # want to keep in the configuration data.
            # They will be added back to the exported data when the configuration
            # is saved (`processor.revert_processor_changes()`).
            self = cls(**processor.get_processed_data())
        finally:
            processing.set(None)

        # Quick setup and we're done.
        self._config_source = config_source
        self._config_processor = processor
        return self

    @classmethod
    @isolated_context_coroutine
    async def config_load_async(
        cls,
        source: object | None = None,
        *,
        processor_factory: Callable[..., ConfigProcessor] | None = None,
    ) -> Self:
        """
        Do the same as `config_load`, but asynchronously (no I/O blocking).

        Parameters
        ----------
        source
            Where to load the configuration from. The argument passed is forwarded
            to `configzen.sources.get_config_source()` which will resolve
            the intended configuration source: for example, "abc.ini" will be resolved
            to a TOML text file source. Keep in mind, however, that for binary formats
            such as non-XML Plist you must specify its format type to binary, so in
            that case just create `BinaryFileConfigSource("plist_file.plist")`.
        processor_factory
            The state factory to use to parse the newly loaded configuration data.

        Returns
        -------
        self

        """
        cls._try_rebuild_model()

        # Intentionally not using `run_sync(config_load)` here.
        # We want to keep the set-up instructions blocking to avoid running
        # into mutexes.

        config_source = cls._validate_config_source(source)
        make_processor = cls._validate_processor_factory(processor_factory)
        processor = make_processor(await config_source.load_async())

        try:
            processing.set(ProcessingContext(cls, processor, trace=[config_source]))

            self = cls(**await run_sync(processor.get_processed_data))
        finally:
            processing.set(None)

        self._config_processor = processor
        self._config_source = config_source
        return self

    def config_reload(self) -> Self:
        """Reload the configuration from the same source."""
        source = self.config_source

        if source is None:
            msg = "Cannot reload a manually instantiated configuration"
            raise RuntimeError(msg)

        root = self.config_root

        # Create a new processor with the same options as the current one.
        processor = root.config_processor.create_processor(source.load())

        # Construct a new configuration instance.
        # Respect __class__ attribute in case root might be a proxy (from proxyvars).
        new_root = root.__class__(**processor.get_processed_data())

        # Copy values from the freshly loaded configuration into our instance.
        if root is self:
            new_data = new_root.config_dump()
        else:
            route_to_self = root.config_find_route(self)
            new_data = cast("Self", route_to_self.get(new_root)).config_dump()

        for key, value in new_data.items():
            setattr(self, key, value)

        return self

    async def config_reload_async(self) -> Self:
        """Do the same as `config_reload` asynchronously (no I/O blocking)."""
        source = self.config_source

        if source is None:
            msg = "Cannot reload a manually instantiated configuration"
            raise RuntimeError(msg)

        root = self.config_root

        # Create a new state processor with the same options as the current one.
        processor = root.config_processor.create_processor(source.load())

        # Construct a new configuration instance.
        new_root = root.__class__(**await run_sync(processor.get_processed_data))

        # Copy values from the freshly loaded configuration into our instance.
        if root is self:
            new_data = new_root.config_dump()
        else:
            route_to_self = root.config_find_route(self)
            new_data = cast("Self", route_to_self.get(new_root)).config_dump()

        for key, value in new_data.items():
            setattr(self, key, value)

        return self

    def _config_data_save(
        self,
        destination: object | None = None,
    ) -> tuple[ConfigSource[Any, Any], Data]:
        if destination is None:
            config_destination = self.config_source
        else:
            config_destination = self._validate_config_source(destination)

        if config_destination is None:
            msg = "Cannot save configuration (source/destination unknown)"
            raise RuntimeError(msg)

        root = self.config_root
        processor = self.config_processor

        if root is self:
            new_data = self.config_dump()
        else:
            # Construct a new configuration instance.
            # Respect `__class__` attribute: root might be a proxy, e.g. from proxyvars.
            new_root = root.__class__(**processor.get_processed_data())
            routes = root.config_find_routes(self)

            for route in routes:
                route.set(new_root, self)

            new_data = new_root.config_dump()

        parsed_data = processor.get_processed_data()
        roundtrip_update_mapping(roundtrip_data=parsed_data, mergeable_data=new_data)
        flat_new_data = parsed_data.revert_replacements()

        data = processor.roundtrip_initial
        config_destination.data_format.roundtrip_update_mapping(
            roundtrip_data=data,
            mergeable_data=flat_new_data,
        )
        return config_destination, data

    def config_save(self, destination: object | None = None) -> Self:
        """
        Save the configuration to a given destination.

        Parameters
        ----------
        destination
            Where to save the configuration to. The argument passed is forwarded
            to `configzen.sources.get_config_source()` which will resolve
            the intended configuration source: for example, "abc.ini" will be resolved
            to a TOML text file source. Keep in mind, however, that for binary formats
            such as non-XML Plist you must specify its format type to binary, so in
            that case just create `BinaryFileConfigSource("plist_file.plist")`.

        """
        config_destination, data = self._config_data_save(destination)
        config_destination.dump(data)
        return self

    async def config_save_async(self, destination: object | None = None) -> Self:
        """
        Do the same as `config_save`, but asynchronously (no I/O blocking).

        Parameters
        ----------
        destination
            Where to save the configuration to. The argument passed is forwarded
            to `configzen.sources.get_config_source()` which will resolve
            the intended configuration source: for example, "abc.ini" will be resolved
            to a TOML text file source. Keep in mind, however, that for binary formats
            such as non-XML Plist you must specify its format type to binary, so in
            that case just create `BinaryFileConfigSource("plist_file.plist")`.

        """
        config_destination, data = self._config_data_save(destination)
        await config_destination.dump_async(data)
        return self

    def config_at(self, *routes: RouteLike) -> Item:
        """Return a configuration item at the given set of routes."""
        return Item(routes=set(map(Route, routes)), config=self)

    def config_dump(self) -> dict[str, object]:
        """Return a dictionary representation of the configuration."""
        return super().model_dump()

    def __getitem__(self, routes: RouteLike | tuple[RouteLike, ...]) -> Item:
        """Return a configuration item at the given set of routes."""
        if isinstance(routes, tuple):
            return self.config_at(*routes)
        return self.config_at(routes)

    def __setitem__(self, item: RouteLike, value: Any) -> None:
        """Set a configuration item at the given set of routes."""
        self.config_at(item).config = value

    def __init_subclass__(cls, **kwargs: Unpack[ModelConfig]) -> None:
        """Initialize the configuration subclass."""
        super().__init_subclass__(**cast("BaseConfigDict", kwargs))

    model_config: ClassVar[ModelConfig] = ModelConfig(
        # Be lenient about forward references.
        rebuild_on_load=True,
        # Keep the configuration valid & fail-proof for the whole time.
        validate_assignment=True,
        # Make it easier to spot typos.
        extra="forbid",
    )

config_root: BaseConfig property

Return the root configuration that was used to load the entire data.

config_source: ConfigSource[Any, Any] | None property

Return the configuration source that was used to load the configuration.

config_data: Data property

Return the configuration that was loaded from the configuration source.

config_processor: ConfigProcessor property

Current configuration processor.

Processor stores the initial data used when loading the configuration, resolves macros etc.
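
A minimal sketch of how these properties behave for a nested model (AppConfig and DatabaseConfig are hypothetical names; "app.yml" is a placeholder path):

from configzen.config import BaseConfig

class DatabaseConfig(BaseConfig):
    port: int = 5432

class AppConfig(BaseConfig):
    database: DatabaseConfig = DatabaseConfig()

app = AppConfig.config_load("app.yml")
# The nested model remembers the root that loaded it, so source and data
# queries are delegated back to that root.
print(app.database.config_root is app)   # True when loaded through config_load
print(app.database.config_source)        # the same source the root was loaded from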

config_find_routes(subconfig: BaseConfig) -> set[Route]

Locate all occurrences of a subconfiguration in the current configuration.

Return a set of routes to the located subconfiguration.

Source code in configzen/config.py
def config_find_routes(
    self,
    subconfig: BaseConfig,
) -> set[Route]:
    """
    Locate all occurrences of a subconfiguration in the current configuration.

    Return a set of routes to the located subconfiguration.
    """
    if not isinstance(subconfig, BaseConfig):
        msg = f"Expected a BaseConfig subclass instance, got {type(subconfig)!r}"
        raise TypeError(msg)
    return set(
        _locate_in_mapping(vars(self), subconfig, attribute_access=True),
    )

config_find_route(subconfig: BaseConfig) -> Route

Locate exactly one (closest) route to the given subconfiguration.

Source code in configzen/config.py
def config_find_route(self, subconfig: BaseConfig) -> Route:
    """Locate exactly one (closest) route to the given subconfiguration."""
    all_routes = self.config_find_routes(subconfig)
    if not all_routes:
        msg = f"Unable to locate subconfiguration {subconfig}"
        raise LookupError(msg)
    return next(iter(all_routes))
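
Continuing the hypothetical AppConfig/DatabaseConfig sketch above, the route helpers can be used like this ("app.yml" remains a placeholder path):

app = AppConfig.config_load("app.yml")
routes = app.config_find_routes(app.database)   # every route pointing at the sub-config
route = app.config_find_route(app.database)     # exactly one (closest) of those routes
print(routes, route)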

config_load(source: object | None = None, *, processor_factory: Callable[..., ConfigProcessor] | None = None) -> Self classmethod

Load this configuration from a given source.

Parameters:

source (object | None, default: None)
    Where to load the configuration from. The argument passed is forwarded to configzen.sources.get_config_source(), which will resolve the intended configuration source: for example, "abc.ini" will be resolved to a TOML text file source. Keep in mind, however, that for binary formats such as non-XML Plist you must specify the format type as binary, so in that case create BinaryFileConfigSource("plist_file.plist") directly.

context
    The context to use during model validation. See also [model_validate][pydantic.BaseModel.model_validate].

processor_factory (Callable[..., ConfigProcessor] | None, default: None)
    The state factory used to parse the newly loaded configuration data.

Returns:

Self
    The loaded configuration instance.
Source code in configzen/config.py
@classmethod
@isolated_context_function
def config_load(
    cls,
    source: object | None = None,
    *,
    processor_factory: Callable[..., ConfigProcessor] | None = None,
) -> Self:
    """
    Load this configuration from a given source.

    Parameters
    ----------
    source
        Where to load the configuration from. The argument passed is forwarded
        to `configzen.sources.get_config_source()` which will resolve
        the intended configuration source: for example, "abc.ini" will be resolved
        to a TOML text file source. Keep in mind, however, that for binary formats
        such as non-XML Plist you must specify its format type to binary, so in
        that case just create `BinaryFileConfigSource("plist_file.plist")`.
    context
        The context to use during model validation.
        See also [`model_validate`][pydantic.BaseModel.model_validate].
    processor_factory
        The state factory to use to parse the newly loaded configuration data.

    Returns
    -------
    self

    """
    cls._try_rebuild_model()

    # Validate the source we load our configuration from.
    config_source = cls._validate_config_source(source)

    # Validate the processor we use to parse the loaded configuration data.
    make_processor = cls._validate_processor_factory(processor_factory)

    # Load the configuration data from the sanitized source.
    # Keep in mind the loaded data object keeps all the additional
    # metadata that we want to keep.
    # Then we pass it to the processor factory to process the configuration data
    # into a bare dictionary that does not hold anything else
    # than the configuration data, by using `processor.get_processed_data()`.
    processor = make_processor(config_source.load())

    # ruff: noqa: FBT003
    try:
        processing.set(ProcessingContext(cls, processor, trace=[config_source]))

        # Processing will execute any commands that are present
        # in the configuration data and return the final configuration
        # data that we will use to construct an instance of the configuration model.
        # During this process, we lose all the additional metadata that we
        # want to keep in the configuration data.
        # They will be added back to the exported data when the configuration
        # is saved (`processor.revert_processor_changes()`).
        self = cls(**processor.get_processed_data())
    finally:
        processing.set(None)

    # Quick setup and we're done.
    self._config_source = config_source
    self._config_processor = processor
    return self
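
A minimal usage sketch (AppConfig is the hypothetical model from earlier; "app.yml" is a placeholder path):

# Load from an explicit source...
config = AppConfig.config_load("app.yml")

# ...or omit the argument to fall back to model_config["config_source"].
config = AppConfig.config_load()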

config_load_async(source: object | None = None, *, processor_factory: Callable[..., ConfigProcessor] | None = None) -> Self async classmethod

Do the same as config_load, but asynchronously (no I/O blocking).

Parameters:

source (object | None, default: None)
    Where to load the configuration from. The argument passed is forwarded to configzen.sources.get_config_source(), which will resolve the intended configuration source: for example, "abc.ini" will be resolved to a TOML text file source. Keep in mind, however, that for binary formats such as non-XML Plist you must specify the format type as binary, so in that case create BinaryFileConfigSource("plist_file.plist") directly.

processor_factory (Callable[..., ConfigProcessor] | None, default: None)
    The state factory used to parse the newly loaded configuration data.

Returns:

Self
    The loaded configuration instance.
Source code in configzen/config.py
@classmethod
@isolated_context_coroutine
async def config_load_async(
    cls,
    source: object | None = None,
    *,
    processor_factory: Callable[..., ConfigProcessor] | None = None,
) -> Self:
    """
    Do the same as `config_load`, but asynchronously (no I/O blocking).

    Parameters
    ----------
    source
        Where to load the configuration from. The argument passed is forwarded
        to `configzen.sources.get_config_source()` which will resolve
        the intended configuration source: for example, "abc.ini" will be resolved
        to a TOML text file source. Keep in mind, however, that for binary formats
        such as non-XML Plist you must specify its format type to binary, so in
        that case just create `BinaryFileConfigSource("plist_file.plist")`.
    processor_factory
        The state factory to use to parse the newly loaded configuration data.

    Returns
    -------
    self

    """
    cls._try_rebuild_model()

    # Intentionally not using `run_sync(config_load)` here.
    # We want to keep the set-up instructions blocking to avoid running
    # into mutexes.

    config_source = cls._validate_config_source(source)
    make_processor = cls._validate_processor_factory(processor_factory)
    processor = make_processor(await config_source.load_async())

    try:
        processing.set(ProcessingContext(cls, processor, trace=[config_source]))

        self = cls(**await run_sync(processor.get_processed_data))
    finally:
        processing.set(None)

    self._config_processor = processor
    self._config_source = config_source
    return self
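
A minimal asynchronous counterpart of the sketch above (same hypothetical AppConfig and placeholder path):

import asyncio

async def main() -> None:
    config = await AppConfig.config_load_async("app.yml")
    print(config.config_dump())

asyncio.run(main())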

config_reload() -> Self

Reload the configuration from the same source.

Source code in configzen/config.py
def config_reload(self) -> Self:
    """Reload the configuration from the same source."""
    source = self.config_source

    if source is None:
        msg = "Cannot reload a manually instantiated configuration"
        raise RuntimeError(msg)

    root = self.config_root

    # Create a new processor with the same options as the current one.
    processor = root.config_processor.create_processor(source.load())

    # Construct a new configuration instance.
    # Respect __class__ attribute in case root might be a proxy (from proxyvars).
    new_root = root.__class__(**processor.get_processed_data())

    # Copy values from the freshly loaded configuration into our instance.
    if root is self:
        new_data = new_root.config_dump()
    else:
        route_to_self = root.config_find_route(self)
        new_data = cast("Self", route_to_self.get(new_root)).config_dump()

    for key, value in new_data.items():
        setattr(self, key, value)

    return self

config_reload_async() -> Self async

Do the same as config_reload asynchronously (no I/O blocking).

Source code in configzen/config.py
async def config_reload_async(self) -> Self:
    """Do the same as `config_reload` asynchronously (no I/O blocking)."""
    source = self.config_source

    if source is None:
        msg = "Cannot reload a manually instantiated configuration"
        raise RuntimeError(msg)

    root = self.config_root

    # Create a new state processor with the same options as the current one.
    processor = root.config_processor.create_processor(source.load())

    # Construct a new configuration instance.
    new_root = root.__class__(**await run_sync(processor.get_processed_data))

    # Copy values from the freshly loaded configuration into our instance.
    if root is self:
        new_data = new_root.config_dump()
    else:
        route_to_self = root.config_find_route(self)
        new_data = cast("Self", route_to_self.get(new_root)).config_dump()

    for key, value in new_data.items():
        setattr(self, key, value)

    return self

config_save(destination: object | None = None) -> Self

Save the configuration to a given destination.

Parameters:

destination (object | None, default: None)
    Where to save the configuration to. The argument passed is forwarded to configzen.sources.get_config_source(), which will resolve the intended configuration source: for example, "abc.ini" will be resolved to a TOML text file source. Keep in mind, however, that for binary formats such as non-XML Plist you must specify the format type as binary, so in that case create BinaryFileConfigSource("plist_file.plist") directly.
Source code in configzen/config.py
def config_save(self, destination: object | None = None) -> Self:
    """
    Save the configuration to a given destination.

    Parameters
    ----------
    destination
        Where to save the configuration to. The argument passed is forwarded
        to `configzen.sources.get_config_source()` which will resolve
        the intended configuration source: for example, "abc.ini" will be resolved
        to a TOML text file source. Keep in mind, however, that for binary formats
        such as non-XML Plist you must specify its format type to binary, so in
        that case just create `BinaryFileConfigSource("plist_file.plist")`.

    """
    config_destination, data = self._config_data_save(destination)
    config_destination.dump(data)
    return self

config_save_async(destination: object | None = None) -> Self async

Do the same as config_save, but asynchronously (no I/O blocking).

Parameters:

destination (object | None, default: None)
    Where to save the configuration to. The argument passed is forwarded to configzen.sources.get_config_source(), which will resolve the intended configuration source: for example, "abc.ini" will be resolved to a TOML text file source. Keep in mind, however, that for binary formats such as non-XML Plist you must specify the format type as binary, so in that case create BinaryFileConfigSource("plist_file.plist") directly.
Source code in configzen/config.py
async def config_save_async(self, destination: object | None = None) -> Self:
    """
    Do the same as `config_save`, but asynchronously (no I/O blocking).

    Parameters
    ----------
    destination
        Where to save the configuration to. The argument passed is forwarded
        to `configzen.sources.get_config_source()` which will resolve
        the intended configuration source: for example, "abc.ini" will be resolved
        to a TOML text file source. Keep in mind, however, that for binary formats
        such as non-XML Plist you must specify its format type to binary, so in
        that case just create `BinaryFileConfigSource("plist_file.plist")`.

    """
    config_destination, data = self._config_data_save(destination)
    await config_destination.dump_async(data)
    return self
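
A minimal load-modify-save sketch (hypothetical AppConfig, placeholder paths). Because the default model_config enables validate_assignment, the mutation is validated before it is written back:

config = AppConfig.config_load("app.yml")
config.port = 8081                 # validated on assignment
config.config_save()               # write back to the source it was loaded from
config.config_save("backup.yml")   # or save a copy to another destination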

config_at(*routes: RouteLike) -> Item

Return a configuration item at the given set of routes.

Source code in configzen/config.py
def config_at(self, *routes: RouteLike) -> Item:
    """Return a configuration item at the given set of routes."""
    return Item(routes=set(map(Route, routes)), config=self)

config_dump() -> dict[str, object]

Return a dictionary representation of the configuration.

Source code in configzen/config.py
def config_dump(self) -> dict[str, object]:
    """Return a dictionary representation of the configuration."""
    return super().model_dump()

__getitem__(routes: RouteLike | tuple[RouteLike, ...]) -> Item

Return a configuration item at the given set of routes.

Source code in configzen/config.py
def __getitem__(self, routes: RouteLike | tuple[RouteLike, ...]) -> Item:
    """Return a configuration item at the given set of routes."""
    if isinstance(routes, tuple):
        return self.config_at(*routes)
    return self.config_at(routes)

__setitem__(item: RouteLike, value: Any) -> None

Set a configuration item at the given set of routes.

Source code in configzen/config.py
def __setitem__(self, item: RouteLike, value: Any) -> None:
    """Set a configuration item at the given set of routes."""
    self.config_at(item).config = value
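
A minimal sketch of route-based item access (hypothetical AppConfig from earlier; the route here is a single attribute name):

item = config["port"]        # equivalent to config.config_at("port")
config["port"] = 8081        # equivalent to config.config_at("port").config = 8081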

__init_subclass__(**kwargs: Unpack[ModelConfig]) -> None

Initialize the configuration subclass.

Source code in configzen/config.py
def __init_subclass__(cls, **kwargs: Unpack[ModelConfig]) -> None:
    """Initialize the configuration subclass."""
    super().__init_subclass__(**cast("BaseConfigDict", kwargs))

config_step(owner: type[BaseConfig], _annotation: Any, step: Step[Any]) -> Any

Return the value of a configuration attribute.

Source code in configzen/config.py
@advance_linked_route.register(BaseConfig)
def config_step(
    owner: type[BaseConfig],
    _annotation: Any,
    step: Step[Any],
) -> Any:
    """Return the value of a configuration attribute."""
    return owner.model_fields[step.key].annotation

context

Facilities for contextual processing.

isolated_context_function(func: Callable[_P, _T]) -> Callable[_P, _T]

Copy the context automatically on function call.

Allows to isolate the library context from the user context.

Used as a decorator.

Source code in configzen/context.py
def isolated_context_function(func: Callable[_P, _T]) -> Callable[_P, _T]:
    """
    Copy the context automatically on function call.

    Allows to isolate the library context from the user context.

    Used as a decorator.
    """
    if isinstance(func, (classmethod, staticmethod)):
        return type(func)(isolated_context_function(func.__func__))

    @wraps(func)
    def copy(*args: _P.args, **kwargs: _P.kwargs) -> _T:
        return run_isolated(func, *args, **kwargs)

    return copy

isolated_context_coroutine(func: Callable[_P, Coroutine[object, object, _T]]) -> Callable[_P, Coroutine[object, object, _T]]

Copy the context automatically on coroutine execution.

Allows to isolate library context from the user context.

Used as a decorator.

Source code in configzen/context.py
def isolated_context_coroutine(
    func: Callable[_P, Coroutine[object, object, _T]],
) -> Callable[_P, Coroutine[object, object, _T]]:
    """
    Copy the context automatically on coroutine execution.

    Allows to isolate library context from the user context.

    Used as a decorator.
    """
    if isinstance(func, (classmethod, staticmethod)):
        return type(func)(isolated_context_coroutine(func.__func__))

    @wraps(func)
    async def copy_async(*args: _P.args, **kwargs: _P.kwargs) -> _T:
        return await async_run_isolated(func, *args, **kwargs)

    return copy_async

run_isolated(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T

Run a function in an isolated context.

Source code in configzen/context.py
def run_isolated(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T:
    """Run a function in an isolated context."""
    context = contextvars.copy_context()
    return context.run(func, *args, **kwargs)
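
A minimal sketch of the isolation guarantee, assuming run_isolated is imported from configzen.context (the module shown above): context-variable changes made inside the function do not leak into the caller's context.

import contextvars
from configzen.context import run_isolated

request_id = contextvars.ContextVar("request_id", default="unset")

def handler() -> None:
    request_id.set("abc123")   # only visible inside the copied context

run_isolated(handler)
print(request_id.get())        # still "unset" in the caller's context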

async_run_isolated(func: Callable[_P, Coroutine[object, object, _T]], *args: _P.args, **kwargs: _P.kwargs) -> asyncio.Task[_T]

Await a coroutine in an isolated context.

Source code in configzen/context.py
def async_run_isolated(
    func: Callable[_P, Coroutine[object, object, _T]],
    *args: _P.args,
    **kwargs: _P.kwargs,
) -> asyncio.Task[_T]:
    """Await a coroutine in an isolated context."""
    return asyncio.create_task(func(*args, **kwargs))

contrib

configzen.contrib: miscellaneous reusable configuration models from configzen.

pyproject

Configuration model for pyproject.toml files.

setup

Configuration model for pyproject.toml files.

data

Interfaces for encapsulation of configuring and using data formats.

BinaryDataFormat = DataFormat[DataFormatOptionsType, bytes] module-attribute

Core interface for configuring and using binary data formats within configzen.

Do not use this class directly. If you need to implement your own binary data format, implement a subclass of this class. Remember to ensure that the module defining your subclass is imported (executed), so that the subclass gets registered in the registry of data formats.

TextDataFormat = DataFormat[DataFormatOptionsType, str] module-attribute

Core interface for configuring and using text data formats within configzen.

Do not use this class directly. If you need to implement your own text data format, implement a subclass of this class. Remember to ensure that the module defining your subclass is imported (executed), so that the subclass gets registered in the registry of data formats.

DataFormatOptions

Bases: TypedDict

Base class for indicating possible options to configure a data format.

Source code in configzen/data.py
class DataFormatOptions(TypedDict, total=False):
    """Base class for indicating possible options to configure a data format."""

DataFormat

Bases: Generic[DataFormatOptionsType, AnyStr]

Core interface for configuring and using any data format within configzen.

Do not use this class directly. If you need to implement your own data format, implement a subclass of:

- BinaryDataFormat, if it is a bitwise (binary) data format, or
- TextDataFormat, if it is a text data format.

Source code in configzen/data.py
@runtime_generic
class DataFormat(Generic[DataFormatOptionsType, AnyStr], metaclass=ABCMeta):
    """
    Core interface for configuring and using any data format within configzen.

    Do not use this class directly.
    If you need to implement your own data format, implement a subclass of:
    - BinaryDataFormat, if it is a bitwise data format, or
    - TextDataFormat, if it is a text data format.
    """

    extension_registry: ClassVar[dict[str, type[DataFormat[Any, Any]]]] = {}

    default_extension: ClassVar[str]
    file_extensions: ClassVar[set[str]]
    option_name: ClassVar[str]

    def __init__(self, options: DataFormatOptionsType | None = None) -> None:
        self.configure(**(options or {}))

    @classmethod
    def for_extension(
        cls,
        extension_name: str,
        options: DataFormatOptionsType | None = None,
    ) -> DataFormat[Any, Any]:
        """Create a data format instance for an extension."""
        return cls.extension_registry[extension_name](options)

    if TYPE_CHECKING:

        @overload
        def is_binary(
            self: DataFormat[DataFormatOptionsType, bytes],
        ) -> Literal[True]: ...

        @overload
        def is_binary(
            self: DataFormat[DataFormatOptionsType, str],
        ) -> Literal[False]: ...

    def is_binary(self) -> bool:
        """Return whether the data format is bitwise."""
        return type_check(self, DataFormat[Any, bytes])

    # Unpack[DataFormatOptionsType] cannot be used here,
    # because this functionality is not supported by mypy yet.
    # Override the **options annotation in your subclass of DataFormat with
    # the subclass of DataFormatOptions corresponding to your subclass of DataFormat.
    def configure(self, **options: Unpack[DataFormatOptions]) -> None:
        """
        Configure the data format.

        Every data format provides its own options, related to comments, indentation,
        and other format-specific features.
        """

    @abstractmethod
    def load(self, stream: IO[AnyStr]) -> Data:
        """
        Load the data from a stream.

        Return a mutable mapping representing the loaded data
        which is mutation-sensitive (for round-trip processing).

        Every configuration source transforms the input data into a stream
        to be processed by the data format, because most data format libraries
        operate on streams.

        This method is called by the config instance.
        """

    @abstractmethod
    def dump(self, data: Data, stream: IO[AnyStr]) -> None:
        """
        Dump the data to a stream.

        Every configuration source transforms the input data into a stream
        to be processed by the data format, because most libraries implementing
        data formats operate on streams.

        This method is called by the config instance.
        """

    @classmethod
    def register_file_extensions(cls) -> None:
        """Register the file extensions supported by this data format."""
        cls.extension_registry.update(dict.fromkeys(cls.file_extensions, cls))

    def validate_source(self, source: ConfigSource[Any, AnyStr]) -> None:
        """Validate the config source."""
        if self.is_binary() and not source.is_binary():
            msg = (
                f"{source} is not a binary source, "
                f"but {self.__class__.__name__} is a binary data format"
            )
            raise TypeError(msg)

    def roundtrip_update_mapping(
        self,
        roundtrip_data: Data,
        mergeable_data: MutableMapping[str, object],
    ) -> None:
        """
        Update the loaded data in a round-trip manner.

        Use values from the configuration altered programmatically in runtime,
        while keeping the structure and comments of the original data.

        Parameters
        ----------
        roundtrip_data
            The data to be updated. Stores the original structure, comments etc.
        mergeable_data
            The new values to be merged into the loaded data.

        """
        return roundtrip_update_mapping(
            roundtrip_data,
            mergeable_data,
            _recursive_update_mapping=self.roundtrip_update_mapping,
            _recursive_update_sequence=self.roundtrip_update_sequence,
        )

    def roundtrip_update_sequence(
        self,
        roundtrip_data: MutableSequence[object],
        mergeable_data: Sequence[object],
    ) -> None:
        """Merge new data sequence without losing comments."""
        return roundtrip_update_sequence(
            roundtrip_data,
            mergeable_data,
            _recursive_update_mapping=self.roundtrip_update_mapping,
            _recursive_update_sequence=self.roundtrip_update_sequence,
        )

    def __init_subclass__(cls, *, skip_hook: bool = False) -> None:
        """Subclass hook. Pass skip_hook=True to skip it."""
        if not skip_hook:
            if getattr(cls, "option_name", None) is None:
                msg = (
                    f"{cls.__name__} must have an option_name attribute "
                    "if it is not a class with skip_hook=True parameter"
                )
                raise TypeError(msg)
            if getattr(cls, "file_extensions", None) is None:
                cls.file_extensions = set()
            cls.file_extensions.add(cls.default_extension)
            cls.register_file_extensions()
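
A rough sketch of a custom text data format following the interface above. Everything here (the class name, the "kv" extension, the key=value syntax) is an illustrative assumption, not part of configzen, and Data is assumed to be importable from configzen.data alongside DataFormat. A real implementation should return a round-trip-aware mapping from load(); a plain dict is used only to keep the sketch short.

from typing import IO

from configzen.data import Data, DataFormat, DataFormatOptions

class KeyValueDataFormat(DataFormat[DataFormatOptions, str]):
    """Hypothetical `key=value` text format."""

    option_name = "keyvalue"
    default_extension = "kv"   # registered automatically by __init_subclass__

    def load(self, stream: IO[str]) -> Data:
        # Parse `key=value` lines into a mapping (no round-trip metadata here).
        return {
            key: value
            for key, _, value in (
                line.partition("=") for line in stream.read().splitlines() if line
            )
        }

    def dump(self, data: Data, stream: IO[str]) -> None:
        # Serialize the mapping back into `key=value` lines.
        stream.write("\n".join(f"{key}={value}" for key, value in data.items()))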

for_extension(extension_name: str, options: DataFormatOptionsType | None = None) -> DataFormat[Any, Any] classmethod

Create a data format instance for an extension.

Source code in configzen/data.py
@classmethod
def for_extension(
    cls,
    extension_name: str,
    options: DataFormatOptionsType | None = None,
) -> DataFormat[Any, Any]:
    """Create a data format instance for an extension."""
    return cls.extension_registry[extension_name](options)

is_binary() -> bool

Return whether the data format is bitwise.

Source code in configzen/data.py
def is_binary(self) -> bool:
    """Return whether the data format is bitwise."""
    return type_check(self, DataFormat[Any, bytes])

configure(**options: Unpack[DataFormatOptions]) -> None

Configure the data format.

Every data format provides its own options, related to comments, indentation, and other format-specific features.

Source code in configzen/data.py
def configure(self, **options: Unpack[DataFormatOptions]) -> None:
    """
    Configure the data format.

    Every data format provides its own options, related to comments, indentation,
    and other format-specific features.
    """

load(stream: IO[AnyStr]) -> Data abstractmethod

Load the data from a stream.

Return a mutable mapping representing the loaded data which is mutation-sensitive (for round-trip processing).

Every configuration source transforms the input data into a stream to be processed by the data format, because most data format libraries operate on streams.

This method is called by the config instance.

Source code in configzen/data.py
@abstractmethod
def load(self, stream: IO[AnyStr]) -> Data:
    """
    Load the data from a stream.

    Return a mutable mapping representing the loaded data
    which is mutation-sensitive (for round-trip processing).

    Every configuration source transforms the input data into a stream
    to be processed by the data format, because most data format libraries
    operate on streams.

    This method is called by the config instance.
    """

dump(data: Data, stream: IO[AnyStr]) -> None abstractmethod

Dump the data to a stream.

Every configuration source transforms the input data into a stream to be processed by the data format, because most libraries implementing data formats operate on streams.

This method is called by the config instance.

Source code in configzen/data.py
@abstractmethod
def dump(self, data: Data, stream: IO[AnyStr]) -> None:
    """
    Dump the data to a stream.

    Every configuration source transforms the input data into a stream
    to be processed by the data format, because most libraries implementing
    data formats operate on streams.

    This method is called by the config instance.
    """

register_file_extensions() -> None classmethod

Register the file extensions supported by this data format.

Source code in configzen/data.py
@classmethod
def register_file_extensions(cls) -> None:
    """Register the file extensions supported by this data format."""
    cls.extension_registry.update(dict.fromkeys(cls.file_extensions, cls))

validate_source(source: ConfigSource[Any, AnyStr]) -> None

Validate the config source.

Source code in configzen/data.py
def validate_source(self, source: ConfigSource[Any, AnyStr]) -> None:
    """Validate the config source."""
    if self.is_binary() and not source.is_binary():
        msg = (
            f"{source} is not a binary source, "
            f"but {self.__class__.__name__} is a binary data format"
        )
        raise TypeError(msg)

roundtrip_update_mapping(roundtrip_data: Data, mergeable_data: MutableMapping[str, object]) -> None

Update the loaded data in a round-trip manner.

Use values from the configuration altered programmatically in runtime, while keeping the structure and comments of the original data.

Parameters:

roundtrip_data (Data, required)
    The data to be updated. Stores the original structure, comments, etc.

mergeable_data (MutableMapping[str, object], required)
    The new values to be merged into the loaded data.
Source code in configzen/data.py
def roundtrip_update_mapping(
    self,
    roundtrip_data: Data,
    mergeable_data: MutableMapping[str, object],
) -> None:
    """
    Update the loaded data in a round-trip manner.

    Use values from the configuration altered programmatically in runtime,
    while keeping the structure and comments of the original data.

    Parameters
    ----------
    roundtrip_data
        The data to be updated. Stores the original structure, comments etc.
    mergeable_data
        The new values to be merged into the loaded data.

    """
    return roundtrip_update_mapping(
        roundtrip_data,
        mergeable_data,
        _recursive_update_mapping=self.roundtrip_update_mapping,
        _recursive_update_sequence=self.roundtrip_update_sequence,
    )

roundtrip_update_sequence(roundtrip_data: MutableSequence[object], mergeable_data: Sequence[object]) -> None

Merge new data sequence without losing comments.

Source code in configzen/data.py
def roundtrip_update_sequence(
    self,
    roundtrip_data: MutableSequence[object],
    mergeable_data: Sequence[object],
) -> None:
    """Merge new data sequence without losing comments."""
    return roundtrip_update_sequence(
        roundtrip_data,
        mergeable_data,
        _recursive_update_mapping=self.roundtrip_update_mapping,
        _recursive_update_sequence=self.roundtrip_update_sequence,
    )

__init_subclass__(*, skip_hook: bool = False) -> None

Subclass hook. Pass skip_hook=True to skip it.

Source code in configzen/data.py
def __init_subclass__(cls, *, skip_hook: bool = False) -> None:
    """Subclass hook. Pass skip_hook=True to skip it."""
    if not skip_hook:
        if getattr(cls, "option_name", None) is None:
            msg = (
                f"{cls.__name__} must have an option_name attribute "
                "if it is not a class with skip_hook=True parameter"
            )
            raise TypeError(msg)
        if getattr(cls, "file_extensions", None) is None:
            cls.file_extensions = set()
        cls.file_extensions.add(cls.default_extension)
        cls.register_file_extensions()

roundtrip_update_mapping(roundtrip_data: Data, mergeable_data: MutableMapping[str, object], *, _recursive_update_mapping: Callable[[Data, MutableMapping[str, object]], None] | None = None, _recursive_update_sequence: Callable[[MutableSequence[object], Sequence[object]], None] | None = None) -> None

Update a mapping without losing recursively attached metadata.

Source code in configzen/data.py
def roundtrip_update_mapping(
    roundtrip_data: Data,
    mergeable_data: MutableMapping[str, object],
    *,
    _recursive_update_mapping: Callable[[Data, MutableMapping[str, object]], None]
    | None = None,
    _recursive_update_sequence: Callable[
        [MutableSequence[object], Sequence[object]],
        None,
    ]
    | None = None,
) -> None:
    """Update a mapping without losing recursively attached metadata."""
    if _recursive_update_mapping is None:
        _recursive_update_mapping = partial(
            roundtrip_update_mapping,
            _recursive_update_sequence=_recursive_update_sequence,
        )
    if _recursive_update_sequence is None:
        _recursive_update_sequence = partial(
            roundtrip_update_sequence,
            _recursive_update_mapping=_recursive_update_mapping,
        )
    for key, value in roundtrip_data.items():
        if key in mergeable_data:
            new_value = mergeable_data.pop(key)
            if isinstance(value, MutableMapping):
                # Coerce to a dict to ensure it has the .pop() method
                _recursive_update_mapping(
                    value,
                    dict(cast("Mapping[str, object]", new_value)),
                )
            elif isinstance(value, MutableSequence):
                _recursive_update_sequence(
                    value,
                    cast("MutableSequence[object]", new_value),
                )
            else:
                roundtrip_data[key] = new_value
    for key, value in mergeable_data.items():
        roundtrip_data[key] = value
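
A minimal sketch of what the merge does, assuming roundtrip_update_mapping is imported from configzen.data (plain dicts stand in for round-trip data here):

from configzen.data import roundtrip_update_mapping

roundtrip = {"server": {"host": "localhost", "port": 8080}, "debug": False}
changes = {"server": {"port": 9090}, "greeting": "hi"}

roundtrip_update_mapping(roundtrip_data=roundtrip, mergeable_data=changes)
print(roundtrip)
# {'server': {'host': 'localhost', 'port': 9090}, 'debug': False, 'greeting': 'hi'}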

roundtrip_update_sequence(roundtrip_data: MutableSequence[object], mergeable_data: Sequence[object], *, _recursive_update_mapping: Callable[[Data, MutableMapping[str, object]], None] | None = None, _recursive_update_sequence: Callable[[MutableSequence[object], Sequence[object]], None] | None = None) -> None

Update a sequence without losing recursively attached metadata.

Source code in configzen/data.py
def roundtrip_update_sequence(
    roundtrip_data: MutableSequence[object],
    mergeable_data: Sequence[object],
    *,
    _recursive_update_mapping: Callable[[Data, MutableMapping[str, object]], None]
    | None = None,
    _recursive_update_sequence: Callable[
        [MutableSequence[object], Sequence[object]],
        None,
    ]
    | None = None,
) -> None:
    """Update a sequence without losing recursively attached metadata."""
    if _recursive_update_mapping is None:
        _recursive_update_mapping = partial(
            roundtrip_update_mapping,
            _recursive_update_sequence=_recursive_update_sequence,
        )
    if _recursive_update_sequence is None:
        _recursive_update_sequence = partial(
            roundtrip_update_sequence,
            _recursive_update_mapping=_recursive_update_mapping,
        )
    sequence_length = len(mergeable_data)
    for i, (roundtrip_item, mergeable_item) in enumerate(
        zip_longest(
            roundtrip_data,
            mergeable_data,
        ),
    ):
        if i >= sequence_length:
            roundtrip_data[i] = roundtrip_item
        elif isinstance(roundtrip_item, MutableMapping):
            _recursive_update_mapping(
                roundtrip_item,
                dict(cast("Mapping[str, object]", mergeable_item)),
            )
        elif isinstance(roundtrip_item, MutableSequence):
            _recursive_update_sequence(
                roundtrip_item,
                cast("list[object]", mergeable_item),
            )

errors

Specialized exceptions raised by configzen.

ConfigError

Bases: Exception

Base class for all errors related to configzen.

Source code in configzen/errors.py
class ConfigError(Exception):
    """Base class for all errors related to configzen."""

    def __init__(self, message: str) -> None:
        super().__init__(message)

ConfigLoadError

Bases: ConfigError

Raised when the configuration cannot be loaded.

Source code in configzen/errors.py
class ConfigLoadError(ConfigError):
    """Raised when the configuration cannot be loaded."""

ConfigReloadError

Bases: ConfigLoadError

Raised when the configuration cannot be reloaded.

Source code in configzen/errors.py
class ConfigReloadError(ConfigLoadError):
    """Raised when the configuration cannot be reloaded."""

ConfigSaveError

Bases: ConfigError

Raised when the configuration cannot be saved.

Source code in configzen/errors.py
class ConfigSaveError(ConfigError):
    """Raised when the configuration cannot be saved."""

NotAMappingError

Bases: ConfigLoadError, TypeError

Raised when the configuration being loaded is not a mapping.

Source code in configzen/errors.py
class NotAMappingError(ConfigLoadError, TypeError):
    """Raised when the configuration being loaded is not a mapping."""

ConfigProcessorError

Bases: ConfigError

Raised when a configuration replacement processor error occurs.

Source code in configzen/errors.py
class ConfigProcessorError(ConfigError):
    """Raised when a configuration replacement processor error occurs."""

BaseRouteError

Bases: ConfigError, ValueError

Raised when a configuration item route is invalid.

Source code in configzen/errors.py
class BaseRouteError(ConfigError, ValueError):
    """Raised when a configuration item route is invalid."""

RouteError

Bases: BaseRouteError

Raised when a configuration item route is invalid at a specific index.

Source code in configzen/errors.py
class RouteError(BaseRouteError):
    """Raised when a configuration item route is invalid at a specific index."""

    def __init__(self, message: str, route: str, index: int) -> None:
        self.message = message
        self.route = route
        self.index = index

    def __str__(self) -> str:
        """Return a string representation of the route error."""
        return f"{self.message} ({self.route}:{self.index})"

__str__() -> str

Return a string representation of the route error.

Source code in configzen/errors.py
def __str__(self) -> str:
    """Return a string representation of the route error."""
    return f"{self.message} ({self.route}:{self.index})"

LinkedRouteError

Bases: BaseRouteError

Raised when a declared configuration item route is invalid.

Source code in configzen/errors.py
class LinkedRouteError(BaseRouteError):
    """Raised when a declared configuration item route is invalid."""

    def __init__(
        self,
        message: str,
        route: str,
        config_class: type[BaseConfig],
    ) -> None:
        self.message = message
        self.route = route
        self.config_class = config_class

    def __str__(self) -> str:
        """Return a string representation of the route error."""
        return f"{self.message} ({self.config_class.__name__}.{self.route})"

__str__() -> str

Return a string representation of the route error.

Source code in configzen/errors.py
def __str__(self) -> str:
    """Return a string representation of the route error."""
    return f"{self.message} ({self.config_class.__name__}.{self.route})"
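All of these exceptions share ConfigError as a common ancestor, so a single except clause can act as a catch-all while the subclasses allow finer-grained handling. A small, hedged sketch (the load failure is simulated rather than produced by a real source):

from configzen.errors import ConfigError, ConfigLoadError, RouteError

try:
    # Stand-in for a real load failure raised somewhere inside configzen.
    raise ConfigLoadError("cannot read 'app.yml'")
except ConfigLoadError as exc:
    print(f"could not load configuration: {exc}")
except ConfigError:
    print("some other configzen error")

# RouteError keeps the offending route and index and includes them in __str__.
print(RouteError("unexpected character", route="a.b[", index=3))
# -> unexpected character (a.b[:3)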

formats

configzen.formats: Data formats supported natively by configzen.

std_json

configzen.formats.std_json: The JSON data format.

JSONOptions

Bases: DataFormatOptions

Prototype of the allowed options for the JSON data format.

Source code in configzen/formats/std_json.py
class JSONOptions(DataFormatOptions, total=False):
    """Prototype of the allowed options for the JSON data format."""

    object_hook: Callable[[dict[str, object]], object] | None  # JSONDecoder
    parse_float: Callable[[str], float] | None  # JSONDecoder
    parse_int: Callable[[str], int] | None  # JSONDecoder
    parse_constant: Callable[[str], object] | None  # JSONDecoder
    strict: bool  # JSONDecoder
    object_pairs_hook: (
        Callable[
            [list[tuple[str, object]]],
            object,
        ]
        | None
    )  # JSONDecoder

    skipkeys: bool  # JSONEncoder
    ensure_ascii: bool  # JSONEncoder
    check_circular: bool  # JSONEncoder
    allow_nan: bool  # JSONEncoder
    sort_keys: bool  # JSONEncoder
    indent: int | str | None  # JSONEncoder
    separators: tuple[str, str] | None  # JSONEncoder
    default: Callable[..., object] | None  # JSONEncoder

JSONDataFormat

Bases: TextDataFormat[JSONOptions]

The JSON data format.

Source code in configzen/formats/std_json.py
@runtime_generic
class JSONDataFormat(TextDataFormat[JSONOptions]):
    """The JSON data format."""

    option_name: ClassVar[str] = "json"

    # Subclass and override for global effect.
    json_encoder: JSONEncoder = JSONEncoder()
    json_decoder: JSONDecoder = JSONDecoder()

    default_extension: ClassVar[str] = "json"

    def configure(self, **options: Unpack[JSONOptions]) -> None:
        """For the documentation of the options, see the JSONOptions class."""
        self.json_encoder = JSONEncoder(
            skipkeys=options.get("skipkeys") or self.json_encoder.skipkeys,
            ensure_ascii=options.get("ensure_ascii") or self.json_encoder.ensure_ascii,
            check_circular=options.get("check_circular")
            or self.json_encoder.check_circular,
            allow_nan=options.get("allow_nan") or self.json_encoder.allow_nan,
            indent=options.get("indent") or self.json_encoder.indent,
            separators=options.get("separators")
            or (
                self.json_encoder.item_separator,
                self.json_encoder.key_separator,
            ),
            default=options.get("default") or self.json_encoder.default,
        )
        self.json_decoder = JSONDecoder(
            object_hook=options.get("object_hook") or self.json_decoder.object_hook,
            parse_float=options.get("parse_float") or self.json_decoder.parse_float,
            parse_int=options.get("parse_int") or self.json_decoder.parse_int,
            parse_constant=options.get("parse_constant")
            or self.json_decoder.parse_constant,
            strict=options.get("strict") or self.json_decoder.strict,
            object_pairs_hook=options.get("object_pairs_hook")
            or self.json_decoder.object_pairs_hook,
        )

    def load(self, stream: IO[str]) -> Data:
        """Load the JSON data from the given stream."""
        document = (
            load(
                stream,
                cls=cast("type[JSONDecoder]", lambda **_: self.json_decoder),
            )
            or {}
        )
        if not isinstance(document, dict):
            msg = (
                f"Expected a dict mapping, "
                f"but got {type(document).__name__} instead."
            )
            raise TypeError(msg)
        return document

    def dump(self, data: Data, stream: IO[str]) -> None:
        """Dump the given JSON data to the given stream."""
        dump(
            data,
            stream,
            cls=cast("type[JSONEncoder]", lambda **_: self.json_encoder),
        )
configure(**options: Unpack[JSONOptions]) -> None

For the documentation of the options, see the JSONOptions class.

Source code in configzen/formats/std_json.py
def configure(self, **options: Unpack[JSONOptions]) -> None:
    """For the documentation of the options, see the JSONOptions class."""
    self.json_encoder = JSONEncoder(
        skipkeys=options.get("skipkeys") or self.json_encoder.skipkeys,
        ensure_ascii=options.get("ensure_ascii") or self.json_encoder.ensure_ascii,
        check_circular=options.get("check_circular")
        or self.json_encoder.check_circular,
        allow_nan=options.get("allow_nan") or self.json_encoder.allow_nan,
        indent=options.get("indent") or self.json_encoder.indent,
        separators=options.get("separators")
        or (
            self.json_encoder.item_separator,
            self.json_encoder.key_separator,
        ),
        default=options.get("default") or self.json_encoder.default,
    )
    self.json_decoder = JSONDecoder(
        object_hook=options.get("object_hook") or self.json_decoder.object_hook,
        parse_float=options.get("parse_float") or self.json_decoder.parse_float,
        parse_int=options.get("parse_int") or self.json_decoder.parse_int,
        parse_constant=options.get("parse_constant")
        or self.json_decoder.parse_constant,
        strict=options.get("strict") or self.json_decoder.strict,
        object_pairs_hook=options.get("object_pairs_hook")
        or self.json_decoder.object_pairs_hook,
    )
load(stream: IO[str]) -> Data

Load the JSON data from the given stream.

Source code in configzen/formats/std_json.py
def load(self, stream: IO[str]) -> Data:
    """Load the JSON data from the given stream."""
    document = (
        load(
            stream,
            cls=cast("type[JSONDecoder]", lambda **_: self.json_decoder),
        )
        or {}
    )
    if not isinstance(document, dict):
        msg = (
            f"Expected a dict mapping, "
            f"but got {type(document).__name__} instead."
        )
        raise TypeError(msg)
    return document
dump(data: Data, stream: IO[str]) -> None

Dump the given JSON data to the given stream.

Source code in configzen/formats/std_json.py
def dump(self, data: Data, stream: IO[str]) -> None:
    """Dump the given JSON data to the given stream."""
    dump(
        data,
        stream,
        cls=cast("type[JSONEncoder]", lambda **_: self.json_encoder),
    )
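A hedged round-trip sketch using the JSON format directly. Instantiating the format without arguments is an assumption (the DataFormat base class is documented elsewhere); the data is illustrative.

from io import StringIO

from configzen.formats.std_json import JSONDataFormat

fmt = JSONDataFormat()   # assumption: zero-argument construction works
fmt.configure(indent=2)  # rebuilds the underlying JSONEncoder with an indent

data = fmt.load(StringIO('{"answer": 42, "nested": {"flag": true}}'))
out = StringIO()
fmt.dump(data, out)
print(out.getvalue())    # pretty-printed with a 2-space indent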

std_plist

configzen.formats.std_plist: The Plist data format.

PlistOptions

Bases: DataFormatOptions

Prototype of the allowed options for the Plist data format.

Source code in configzen/formats/std_plist.py
class PlistOptions(DataFormatOptions, total=False):
    """Prototype of the allowed options for the Plist data format."""

    fmt: PlistFormat
    dict_type: type[MutableMapping[str, Any]]
    sort_keys: bool
    skipkeys: bool

PlistDataFormat

Bases: BinaryDataFormat[PlistOptions]

The Plist data format.

Source code in configzen/formats/std_plist.py
@runtime_generic
class PlistDataFormat(BinaryDataFormat[PlistOptions]):
    """The Plist data format."""

    option_name: ClassVar[str] = "plist"

    # Subclass and override for global effect.
    plist_options: PlistOptions = PlistOptions()

    default_extension: ClassVar[str] = "plist"

    def configure(self, **options: Unpack[PlistOptions]) -> None:
        """For the documentation of the options, see the PlistOptions class."""
        if "fmt" not in options:
            options["fmt"] = self.plist_options.get("fmt", PlistFormat.FMT_XML)
        if "dict_type" not in options:
            options["dict_type"] = self.plist_options.get(
                "dict_type",
                dict,
            )  # type: ignore[typeddict-item]
        if "sort_keys" not in options:
            # configzen focuses on preserving the original structure,
            # so we don't sort by default.
            options["sort_keys"] = self.plist_options.get("sort_keys", False)
        if "skipkeys" not in options:
            options["skipkeys"] = self.plist_options.get("skipkeys", False)
        self.plist_options = options

    def load(self, stream: IO[bytes]) -> Data:
        """Load the data from the given stream."""
        dict_class: type[MutableMapping[str, Any]] = self.plist_options["dict_type"]
        document = (
            load(
                stream,
                fmt=self.plist_options["fmt"],
                dict_type=dict_class,
            )
            or dict_class()
        )
        if not isinstance(document, dict_class):
            msg = (
                f"Expected a {dict_class.__name__} mapping, "
                f"but got {type(document).__name__} instead."
            )
            raise TypeError(msg)
        return document

    def dump(self, data: Data, stream: IO[bytes]) -> None:
        """Dump the given data to the stream."""
        dump(
            data,
            stream,
            fmt=self.plist_options["fmt"],
            sort_keys=self.plist_options["sort_keys"],
            skipkeys=self.plist_options["skipkeys"],
        )
configure(**options: Unpack[PlistOptions]) -> None

For the documentation of the options, see the PlistOptions class.

Source code in configzen/formats/std_plist.py
def configure(self, **options: Unpack[PlistOptions]) -> None:
    """For the documentation of the options, see the PlistOptions class."""
    if "fmt" not in options:
        options["fmt"] = self.plist_options.get("fmt", PlistFormat.FMT_XML)
    if "dict_type" not in options:
        options["dict_type"] = self.plist_options.get(
            "dict_type",
            dict,
        )  # type: ignore[typeddict-item]
    if "sort_keys" not in options:
        # configzen focuses on preserving the original structure,
        # so we don't sort by default.
        options["sort_keys"] = self.plist_options.get("sort_keys", False)
    if "skipkeys" not in options:
        options["skipkeys"] = self.plist_options.get("skipkeys", False)
    self.plist_options = options
load(stream: IO[bytes]) -> Data

Load the data from the given stream.

Source code in configzen/formats/std_plist.py
def load(self, stream: IO[bytes]) -> Data:
    """Load the data from the given stream."""
    dict_class: type[MutableMapping[str, Any]] = self.plist_options["dict_type"]
    document = (
        load(
            stream,
            fmt=self.plist_options["fmt"],
            dict_type=dict_class,
        )
        or dict_class()
    )
    if not isinstance(document, dict_class):
        msg = (
            f"Expected a {dict_class.__name__} mapping, "
            f"but got {type(document).__name__} instead."
        )
        raise TypeError(msg)
    return document
dump(data: Data, stream: IO[bytes]) -> None

Dump the given data to the stream.

Source code in configzen/formats/std_plist.py
def dump(self, data: Data, stream: IO[bytes]) -> None:
    """Dump the given data to the stream."""
    dump(
        data,
        stream,
        fmt=self.plist_options["fmt"],
        sort_keys=self.plist_options["sort_keys"],
        skipkeys=self.plist_options["skipkeys"],
    )
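An analogous hedged sketch for the Plist format. configure() fills in defaults for fmt, dict_type, sort_keys and skipkeys, which load() and dump() then read from plist_options; zero-argument construction is again an assumption.

from io import BytesIO

from configzen.formats.std_plist import PlistDataFormat

fmt = PlistDataFormat()  # assumption: zero-argument construction works
fmt.configure()          # defaults: XML plist, plain dict, no key sorting

buffer = BytesIO()
fmt.dump({"answer": 42, "flag": True}, buffer)
buffer.seek(0)
print(fmt.load(buffer))  # {'answer': 42, 'flag': True}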

toml

configzen.formats.toml: The TOML data format.

TOMLOptions

Bases: DataFormatOptions

Prototype of the allowed options for the TOML data format.

Source code in configzen/formats/toml.py
class TOMLOptions(DataFormatOptions, total=False):
    """Prototype of the allowed options for the TOML data format."""

    encoders: list[Encoder]
    """List of encoders to perform automatic tomlkit.register_encoder() calls on."""

    unregister_old_encoders: bool
    """
    Whether to unregister all previously registered encoders
    before registering the new ones.
    """

    sort_keys: bool
    """
    Whether to sort keys in the output.
    """
encoders: list[Encoder] instance-attribute

List of encoders to perform automatic tomlkit.register_encoder() calls on.

unregister_old_encoders: bool instance-attribute

Whether to unregister all previously registered encoders before registering the new ones.

sort_keys: bool instance-attribute

Whether to sort keys in the output.

TOMLDataFormat

Bases: TextDataFormat[TOMLOptions]

The TOML data format.

Source code in configzen/formats/toml.py
@runtime_generic
class TOMLDataFormat(TextDataFormat[TOMLOptions]):
    """The TOML data format."""

    option_name: ClassVar[str] = "toml"

    # Subclass and override for global effect.
    toml_options: TOMLOptions = TOMLOptions()

    default_extension: ClassVar[str] = "toml"
    file_extensions: ClassVar[set[str]] = {"ini", "conf"}

    def configure(self, **options: Unpack[TOMLOptions]) -> None:
        """For the documentation of the options, see the TOMLOptions class."""
        old_options = self.toml_options
        toml_encoders = options.get("encoders") or old_options.get("encoders") or []
        cleanup_old_encoders = options.get("cleanup_old_encoders", False)

        if cleanup_old_encoders:
            for encoder in self.toml_options.get("encoders") or []:
                unregister_encoder(encoder)

        for encoder in toml_encoders:
            register_encoder(encoder)

    def load(self, stream: IO[str]) -> Data:
        """Load the data from the given stream."""
        return load(stream)

    def dump(self, data: Data, stream: IO[str]) -> None:
        """Dump the data to the given stream."""
        dump(
            data,
            stream,
            sort_keys=self.toml_options.get("sort_keys", False),
        )
configure(**options: Unpack[TOMLOptions]) -> None

For the documentation of the options, see the TOMLOptions class.

Source code in configzen/formats/toml.py
def configure(self, **options: Unpack[TOMLOptions]) -> None:
    """For the documentation of the options, see the TOMLOptions class."""
    old_options = self.toml_options
    toml_encoders = options.get("encoders") or old_options.get("encoders") or []
    cleanup_old_encoders = options.get("cleanup_old_encoders", False)

    if cleanup_old_encoders:
        for encoder in self.toml_options.get("encoders") or []:
            unregister_encoder(encoder)

    for encoder in toml_encoders:
        register_encoder(encoder)
load(stream: IO[str]) -> Data

Load the data from the given stream.

Source code in configzen/formats/toml.py
def load(self, stream: IO[str]) -> Data:
    """Load the data from the given stream."""
    return load(stream)
dump(data: Data, stream: IO[str]) -> None

Dump the data to the given stream.

Source code in configzen/formats/toml.py
def dump(self, data: Data, stream: IO[str]) -> None:
    """Dump the data to the given stream."""
    dump(
        data,
        stream,
        sort_keys=self.toml_options.get("sort_keys", False),
    )
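A hedged TOML round-trip sketch; tomlkit parses into a style-preserving document, so comments and layout survive a load/dump cycle. Zero-argument construction is again an assumption.

from io import StringIO

from configzen.formats.toml import TOMLDataFormat

fmt = TOMLDataFormat()   # assumption: zero-argument construction works

source = 'title = "example"\n\n[owner]\nname = "kale"  # a comment\n'
data = fmt.load(StringIO(source))

out = StringIO()
fmt.dump(data, out)
print(out.getvalue())    # comments and layout are preserved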

yaml

configzen.formats.yaml: The YAML data format.

YAMLOptions

Bases: DataFormatOptions

Prototype of the allowed options for the YAML data format.

For more information, see the documentation of the ruamel.yaml.YAML class.

Source code in configzen/formats/yaml.py
class YAMLOptions(DataFormatOptions, total=False):
    """
    Prototype of the allowed options for the YAML data format.

    For more information, see the documentation of the `ruamel.yaml.YAML` class.
    """

    typ: YAMLTyp | list[YAMLTyp] | None
    pure: bool
    plug_ins: list[str]

    classes: list[type]
    """List of classes to automatically call YAML.register_class() on."""

    version: str | tuple[int | str, ...]
    """The YAML version to use."""

    indent: int
    """Indentation width."""

    block_seq_indent: int
    """Indentation for nested block sequences."""
classes: list[type] instance-attribute

List of classes to automatically call YAML.register_class() on.

version: str | tuple[int | str, ...] instance-attribute

The YAML version to use.

indent: int instance-attribute

Indentation width.

block_seq_indent: int instance-attribute

Indentation for nested block sequences.

YAMLDataFormat

Bases: TextDataFormat[YAMLOptions]

The YAML data format.

Source code in configzen/formats/yaml.py
@runtime_generic
class YAMLDataFormat(TextDataFormat[YAMLOptions]):
    """The YAML data format."""

    option_name: ClassVar[str] = "yaml"

    # Subclass and override for global effect.
    yaml: YAML = YAML()

    default_extension: ClassVar[str] = "yml"
    file_extensions: ClassVar[set[str]] = {"yaml"}

    def configure(self, **options: Unpack[YAMLOptions]) -> None:
        """For the documentation of the options, see the YAMLOptions class."""
        yaml_classes = options.pop("classes", None) or []

        old_yaml = self.yaml
        yaml_version = options.pop("version", None) or old_yaml.version
        yaml_indent = options.pop("indent", None) or old_yaml.old_indent
        yaml_block_seq_indent = (
            options.pop("block_seq_indent", None) or old_yaml.block_seq_indent
        )

        yaml = YAML(**options)  # type: ignore[arg-type,misc]
        yaml.version = yaml_version  # type: ignore[assignment]
        yaml.indent = yaml_indent
        yaml.block_seq_indent = yaml_block_seq_indent

        for cls in yaml_classes:
            yaml.register_class(cls)

        self.yaml = yaml

    def load(self, stream: IO[str]) -> Data:
        """
        Load the data from a stream.

        Return a mutable mapping representing the loaded data
        which is mutation-sensitive (for round-trip processing).

        Every configuration source transforms the input data into a stream
        to be processed by the data format, because most data format libraries
        operate on streams.

        This method is called by the configuration model.
        """
        data = self.yaml.load(stream) or CommentedMap()
        if not isinstance(data, dict):
            msg = f"Expected a dict, but got {type(data).__name__}."
            raise TypeError(msg)
        return data

    def dump(self, data: Data, stream: IO[str]) -> None:
        """
        Load the data from a stream.

        Every configuration source transforms the input data into a stream
        to be processed by the data format, because most data format libraries
        operate on streams.

        This method is called by the configuration model.
        """
        self.yaml.dump(data, stream)
configure(**options: Unpack[YAMLOptions]) -> None

For the documentation of the options, see the YAMLOptions class.

Source code in configzen/formats/yaml.py
def configure(self, **options: Unpack[YAMLOptions]) -> None:
    """For the documentation of the options, see the YAMLOptions class."""
    yaml_classes = options.pop("classes", None) or []

    old_yaml = self.yaml
    yaml_version = options.pop("version", None) or old_yaml.version
    yaml_indent = options.pop("indent", None) or old_yaml.old_indent
    yaml_block_seq_indent = (
        options.pop("block_seq_indent", None) or old_yaml.block_seq_indent
    )

    yaml = YAML(**options)  # type: ignore[arg-type,misc]
    yaml.version = yaml_version  # type: ignore[assignment]
    yaml.indent = yaml_indent
    yaml.block_seq_indent = yaml_block_seq_indent

    for cls in yaml_classes:
        yaml.register_class(cls)

    self.yaml = yaml
load(stream: IO[str]) -> Data

Load the data from a stream.

Return a mutable mapping representing the loaded data which is mutation-sensitive (for round-trip processing).

Every configuration source transforms the input data into a stream to be processed by the data format, because most data format libraries operate on streams.

This method is called by the configuration model.

Source code in configzen/formats/yaml.py
def load(self, stream: IO[str]) -> Data:
    """
    Load the data from a stream.

    Return a mutable mapping representing the loaded data
    which is mutation-sensitive (for round-trip processing).

    Every configuration source transforms the input data into a stream
    to be processed by the data format, because most data format libraries
    operate on streams.

    This method is called by the configuration model.
    """
    data = self.yaml.load(stream) or CommentedMap()
    if not isinstance(data, dict):
        msg = f"Expected a dict, but got {type(data).__name__}."
        raise TypeError(msg)
    return data
dump(data: Data, stream: IO[str]) -> None

Dump the data to a stream.

Every configuration source transforms the input data into a stream to be processed by the data format, because most data format libraries operate on streams.

This method is called by the configuration model.

Source code in configzen/formats/yaml.py
def dump(self, data: Data, stream: IO[str]) -> None:
    """
    Load the data from a stream.

    Every configuration source transforms the input data into a stream
    to be processed by the data format, because most data format libraries
    operate on streams.

    This method is called by the configuration model.
    """
    self.yaml.dump(data, stream)
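A hedged YAML sketch: ruamel.yaml's round-trip mode returns comment-preserving containers, so edits made between load() and dump() keep surrounding comments intact. Zero-argument construction of the format is assumed.

from io import StringIO

from configzen.formats.yaml import YAMLDataFormat

fmt = YAMLDataFormat()   # assumption: zero-argument construction works

source = "database:\n  host: localhost  # local dev only\n  port: 5432\n"
data = fmt.load(StringIO(source))
data["database"]["port"] = 5433

out = StringIO()
fmt.dump(data, out)
print(out.getvalue())    # the comment next to `host` is preserved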

module_proxy

Runtime modules with attribute type validation.

ModuleProxy

Bases: ModuleType, Generic[ConfigObject]

Proxy object that extends a runtime module with type validation.

Triggered via a config instance (initialization and assignment).

Parameters:

    name (str): The name of the module. Required.
    config (ConfigObject): The configuration model to use for type validation. Required.
    module_namespace (dict[str, Any] | None): The module namespace to wrap. Default: None.
Source code in configzen/module_proxy.py
class ModuleProxy(types.ModuleType, Generic[ConfigObject]):
    """
    Proxy object that extends a runtime module with type validation.

    Triggered via a config instance (initialization and assignment).

    Parameters
    ----------
    name
        The name of the module.
    config
        The configuration model to use for type validation.
    module_namespace
        The module namespace to wrap.

    """

    __config__: ConfigObject
    __locals__: dict[str, Any]

    def __init__(
        self,
        name: str,
        config: ConfigObject,
        module_namespace: dict[str, Any] | None = None,
        doc: str | None = None,
    ) -> None:
        object.__setattr__(self, "__config__", config)
        object.__setattr__(self, "__locals__", module_namespace or {})
        object.__setattr__(config, "__wrapped_module__", self)

        super().__init__(name=name, doc=doc)

        parts = name.split(".")
        if len(parts) > 1:
            # Set the proxy module as an attribute of its parent.
            parent = sys.modules[".".join(parts[:-1])]
            setattr(parent, parts[-1], self)

        # Make reusable.
        sys.modules[name] = self

    def __getattribute__(self, name: str) -> Any:
        """Get an attribute of the underlying model."""
        if _is_dunder(name):
            return object.__getattribute__(self, name)

        config = self.__config__
        try:
            return getattr(config, name)
        except AttributeError:
            try:
                return self.__locals__[name]
            except KeyError:
                return object.__getattribute__(self, name)

    def __setattr__(self, key: str, value: Any) -> None:
        """Set an attribute on the underlying model."""
        config = self.get_config()
        if not _is_dunder(key) and key in config.model_fields:
            setattr(config, key, value)
        self.__locals__[key] = value

    def __repr__(self) -> str:
        """
        Get the string representation of the module proxy.

        Inform the user that this is a configuration module.
        """
        return super().__repr__().replace("module", "configuration module", 1)

    def get_config(self) -> ConfigObject:
        """Get the configuration model."""
        return self.__config__

    @classmethod
    def wrap_module(
        cls,
        module_name: str,
        config_class: type[ConfigObject] | None = None,
        namespace: dict[str, Any] | None = None,
        /,
        **values: Any,
    ) -> ModuleProxy[ConfigObject]:
        """
        Wrap a module to ensure type validation.

        Every attribute of the wrapped module that is also a field of the config will be
        validated against it. The module will be extended with the config's attributes.
        Assignments on the module's attributes will be propagated to the configuration
        instance. It could be said that the module becomes a proxy for the configuration
        once wrapped.

        Parameters
        ----------
        module_name
            The name of the module to wrap.
        config_class
            The config class to use for type validation.
        namespace
            The namespace of the module to wrap. If not provided, it will be
            retrieved from `sys.modules`.
        values
            Values used to initialize the config.

        Returns
        -------
        The wrapped module.

        """
        from configzen.config import BaseConfig

        if namespace is None:
            module_namespace = vars(sys.modules[module_name])
        else:
            module_namespace = namespace

        if config_class is None:

            class ConfigModule(BaseConfig):
                __module__ = module_name
                __annotations__ = module_namespace["__annotations__"]
                for key in __annotations__:
                    locals()[key] = module_namespace[key]

            config_class = cast("type[ConfigObject]", ConfigModule)

        module_values = {}
        field_names = frozenset(
            field_info.validation_alias
            or field_info.alias
            or field_info.title
            or field_name
            for field_name, field_info in config_class.model_fields.items()
        )
        for key, value in module_namespace.items():
            if key in field_names:
                module_values[key] = value
        config = config_class.model_validate({**module_values, **values})

        return cls(
            config=config,
            module_namespace=module_namespace,
            name=module_namespace.get("__name__") or module_name,
            doc=module_namespace.get("__doc__"),
        )

    @classmethod
    def wrap_this_module(
        cls,
        config_class: type[ConfigObject] | None = None,
        /,
        **values: Any,
    ) -> ModuleProxy[ConfigObject]:
        """
        Wrap the module calling this function.

        For more information on wrapping modules, see `ModuleProxy.wrap_module()`.

        Parameters
        ----------
        config_class
            The config class to use for type validation.
        values
            Values used to initialize the config.

        """
        current_frame = inspect.currentframe()
        if current_frame is None:
            msg = "Could not get the current frame"
            raise RuntimeError(msg)
        frame_back = current_frame.f_back
        if frame_back is None:
            msg = "Could not get the frame back"
            raise RuntimeError(msg)
        return cls.wrap_module(
            {**frame_back.f_globals, **frame_back.f_locals}["__name__"],
            config_class,
            {**frame_back.f_locals, **values},
        )

__getattribute__(name: str) -> Any

Get an attribute of the underlying model.

Source code in configzen/module_proxy.py
def __getattribute__(self, name: str) -> Any:
    """Get an attribute of the underlying model."""
    if _is_dunder(name):
        return object.__getattribute__(self, name)

    config = self.__config__
    try:
        return getattr(config, name)
    except AttributeError:
        try:
            return self.__locals__[name]
        except KeyError:
            return object.__getattribute__(self, name)

__setattr__(key: str, value: Any) -> None

Set an attribute on the underlying model.

Source code in configzen/module_proxy.py
def __setattr__(self, key: str, value: Any) -> None:
    """Set an attribute on the underlying model."""
    config = self.get_config()
    if not _is_dunder(key) and key in config.model_fields:
        setattr(config, key, value)
    self.__locals__[key] = value

__repr__() -> str

Get the string representation of the module proxy.

Inform the user that this is a configuration module.

Source code in configzen/module_proxy.py
def __repr__(self) -> str:
    """
    Get the string representation of the module proxy.

    Inform the user that this is a configuration module.
    """
    return super().__repr__().replace("module", "configuration module", 1)

get_config() -> ConfigObject

Get the configuration model.

Source code in configzen/module_proxy.py
def get_config(self) -> ConfigObject:
    """Get the configuration model."""
    return self.__config__

wrap_module(module_name: str, config_class: type[ConfigObject] | None = None, namespace: dict[str, Any] | None = None, /, **values: Any) -> ModuleProxy[ConfigObject] classmethod

Wrap a module to ensure type validation.

Every attribute of the wrapped module that is also a field of the config will be validated against it. The module will be extended with the config's attributes. Assignments on the module's attributes will be propagated to the configuration instance. It could be said that the module becomes a proxy for the configuration once wrapped.

Parameters:

    module_name (str): The name of the module to wrap. Required.
    config_class (type[ConfigObject] | None): The config class to use for type validation. Default: None.
    namespace (dict[str, Any] | None): The namespace of the module to wrap. If not provided, it will be retrieved from sys.modules. Default: None.
    values (Any): Values used to initialize the config. Default: {}.

Returns:

    ModuleProxy[ConfigObject]: The wrapped module.
Source code in configzen/module_proxy.py
@classmethod
def wrap_module(
    cls,
    module_name: str,
    config_class: type[ConfigObject] | None = None,
    namespace: dict[str, Any] | None = None,
    /,
    **values: Any,
) -> ModuleProxy[ConfigObject]:
    """
    Wrap a module to ensure type validation.

    Every attribute of the wrapped module that is also a field of the config will be
    validated against it. The module will be extended with the config's attributes.
    Assignments on the module's attributes will be propagated to the configuration
    instance. It could be said that the module becomes a proxy for the configuration
    once wrapped.

    Parameters
    ----------
    module_name
        The name of the module to wrap.
    config_class
        The config class to use for type validation.
    namespace
        The namespace of the module to wrap. If not provided, it will be
        retrieved from `sys.modules`.
    values
        Values used to initialize the config.

    Returns
    -------
    The wrapped module.

    """
    from configzen.config import BaseConfig

    if namespace is None:
        module_namespace = vars(sys.modules[module_name])
    else:
        module_namespace = namespace

    if config_class is None:

        class ConfigModule(BaseConfig):
            __module__ = module_name
            __annotations__ = module_namespace["__annotations__"]
            for key in __annotations__:
                locals()[key] = module_namespace[key]

        config_class = cast("type[ConfigObject]", ConfigModule)

    module_values = {}
    field_names = frozenset(
        field_info.validation_alias
        or field_info.alias
        or field_info.title
        or field_name
        for field_name, field_info in config_class.model_fields.items()
    )
    for key, value in module_namespace.items():
        if key in field_names:
            module_values[key] = value
    config = config_class.model_validate({**module_values, **values})

    return cls(
        config=config,
        module_namespace=module_namespace,
        name=module_namespace.get("__name__") or module_name,
        doc=module_namespace.get("__doc__"),
    )

wrap_this_module(config_class: type[ConfigObject] | None = None, /, **values: Any) -> ModuleProxy[ConfigObject] classmethod

Wrap the module calling this function.

For more information on wrapping modules, see ModuleProxy.wrap_module().

Parameters:

    config_class (type[ConfigObject] | None): The config class to use for type validation. Default: None.
    values (Any): Values used to initialize the config. Default: {}.
Source code in configzen/module_proxy.py
@classmethod
def wrap_this_module(
    cls,
    config_class: type[ConfigObject] | None = None,
    /,
    **values: Any,
) -> ModuleProxy[ConfigObject]:
    """
    Wrap the module calling this function.

    For more information on wrapping modules, see `ModuleProxy.wrap_module()`.

    Parameters
    ----------
    config_class
        The config class to use for type validation.
    values
        Values used to initialize the config.

    """
    current_frame = inspect.currentframe()
    if current_frame is None:
        msg = "Could not get the current frame"
        raise RuntimeError(msg)
    frame_back = current_frame.f_back
    if frame_back is None:
        msg = "Could not get the frame back"
        raise RuntimeError(msg)
    return cls.wrap_module(
        {**frame_back.f_globals, **frame_back.f_locals}["__name__"],
        config_class,
        {**frame_back.f_locals, **values},
    )
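A hedged sketch of wrapping a settings-style module. The module and field names are illustrative; when no config class is passed, one is derived from the module's annotated globals, and attribute assignments on the wrapped module are forwarded to the underlying config instance.

# settings.py (illustrative module)
from configzen.module_proxy import ModuleProxy

host: str = "localhost"
port: int = 5432

# Wrap the module calling this function. From now on, `import settings`
# elsewhere yields the proxy, and `settings.port = 5433` is propagated
# to the underlying config model.
ModuleProxy.wrap_this_module()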

processor

Replacement API processor for configuration data.

Allows tweaking the configuration data programmatically before it is given to the config model, and reverting the changes back to the original data structure when the configuration managed by that model is saved.

ProcessorOptions

Bases: TypedDict

Prototype of the allowed options for the ConfigProcessor class.

Source code in configzen/processor.py
class ProcessorOptions(TypedDict, total=False):
    """Prototype of the allowed options for the ConfigProcessor class."""

    macro_prefix: Char
    update_prefix: Char
    macros_on_top: bool
    lenient: bool

ConfigProcessor

A class that takes in configuration data and processes it.

Recursively resolves & applies replacements in data magically.

Source code in configzen/processor.py
class ConfigProcessor:
    """
    A class that takes in configuration data and processes it.

    Recursively resolves & applies replacements in data magically.
    """

    _get_processed_data: Callable[..., _ProcessedData] = _ProcessedData

    _macros: ClassVar[MacroDict]

    def __init__(  # noqa: PLR0913
        self,
        initial: Data,
        *,
        macro_prefix: Char = Char("^"),  # noqa: B008
        update_prefix: Char = Char("+"),  # noqa: B008
        macros_on_top: bool = False,
        lenient: bool = True,
    ) -> None:
        self.__initial = initial
        self.__data: _ProcessedData = None  # type: ignore[assignment]

        self.options = ProcessorOptions(
            macro_prefix=macro_prefix,
            update_prefix=update_prefix,
            macros_on_top=macros_on_top,
            lenient=lenient,
        )

    @property
    def macros(self) -> MacroDict:
        """Get macros bound to this processor."""
        return {
            macro_name: macro.__get__(self, type(self))
            for macro_name, macro in self._macros.items()
        }

    @property
    def roundtrip_initial(self) -> Data:
        """The initial configuration data that the processor was given."""
        return self.__initial

    def create_processor(self, data: Data) -> ConfigProcessor:
        """Create a new configuration processor with identical options."""
        return type(self)(data, **self.options)

    def get_processed_data(
        self,
        *,
        force: bool = False,
    ) -> _ProcessedData:
        """
        Create the data with replacements or return the one already cached.

        Parameters
        ----------
        force
            Whether to forcibly parse the original data even if it was already parsed.
            Default is False.

        """
        if force or self.__data is None:
            self.__data = self._get_processed_data(
                data=self.__initial,
                options=self.options,
                macros=self.macros,
            )
        return self.__data

    def __init_subclass__(cls) -> None:
        """Merge macro registries on subclass."""
        macros_from_class_dict = {
            macro_name: func
            for func in vars(cls).values()
            if (macro_name := getattr(func, MACRO_FUNC, None))
        }
        try:
            macros = {**getattr(cls.__base__, "_macros", {}), **macros_from_class_dict}
        except AttributeError:
            macros = {}
        cls._macros = macros

    @staticmethod
    def sanitize_macro_name(name: str) -> str:
        """Ensure a uniform name of every macro."""
        return name.strip().casefold()

    @classmethod
    def macro(cls, name: str, macro: MacroT) -> MacroT:
        """Override a macro."""
        name = cls.sanitize_macro_name(name)
        cls._macros[name] = macro
        return macro

macros: MacroDict property

Get macros bound to this processor.

roundtrip_initial: Data property

The initial configuration data that the processor was given.

create_processor(data: Data) -> ConfigProcessor

Create a new configuration processor with identical options.

Source code in configzen/processor.py
def create_processor(self, data: Data) -> ConfigProcessor:
    """Create a new configuration processor with identical options."""
    return type(self)(data, **self.options)

get_processed_data(*, force: bool = False) -> _ProcessedData

Create the data with replacements or return the one already cached.

Parameters:

    force (bool): Whether to forcibly parse the original data even if it was already parsed. Default: False.
Source code in configzen/processor.py
def get_processed_data(
    self,
    *,
    force: bool = False,
) -> _ProcessedData:
    """
    Create the data with replacements or return the one already cached.

    Parameters
    ----------
    force
        Whether to forcibly parse the original data even if it was already parsed.
        Default is False.

    """
    if force or self.__data is None:
        self.__data = self._get_processed_data(
            data=self.__initial,
            options=self.options,
            macros=self.macros,
        )
    return self.__data

__init_subclass__() -> None

Merge macro registries on subclass.

Source code in configzen/processor.py
def __init_subclass__(cls) -> None:
    """Merge macro registries on subclass."""
    macros_from_class_dict = {
        macro_name: func
        for func in vars(cls).values()
        if (macro_name := getattr(func, MACRO_FUNC, None))
    }
    try:
        macros = {**getattr(cls.__base__, "_macros", {}), **macros_from_class_dict}
    except AttributeError:
        macros = {}
    cls._macros = macros

sanitize_macro_name(name: str) -> str staticmethod

Ensure a uniform name of every macro.

Source code in configzen/processor.py
@staticmethod
def sanitize_macro_name(name: str) -> str:
    """Ensure a uniform name of every macro."""
    return name.strip().casefold()

macro(name: str, macro: MacroT) -> MacroT classmethod

Override a macro.

Source code in configzen/processor.py
@classmethod
def macro(cls, name: str, macro: MacroT) -> MacroT:
    """Override a macro."""
    name = cls.sanitize_macro_name(name)
    cls._macros[name] = macro
    return macro
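A hedged sketch of registering a custom macro through the macro() classmethod. The macro body mirrors the shape of the built-in extend macro shown below (an instance method taking one argument and returning data); the name and behavior are illustrative.

from configzen.processor import ConfigProcessor


class MyProcessor(ConfigProcessor):
    pass  # inherits the default replacement behavior


def uppercase(self: MyProcessor, value: object) -> dict[str, object]:
    # Replace the macro invocation with data derived from its argument.
    return {"shout": str(value).upper()}


MyProcessor.macro("uppercase", uppercase)

processor = MyProcessor({"key": "value"})
print(list(processor.macros))  # ['uppercase']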

ProcessorReplacement dataclass

A change that was made to the configuration data during processing.

Attributes:

    key (str): The key of the item before alteration.
    value (object): The value of the item before alteration.
    content (Data): The value to unpack in place of the alteration key.

Source code in configzen/processor.py
@dataclass
class ProcessorReplacement:
    """
    A change that was made to the configuration data during processing.

    Attributes
    ----------
    key
        The key of the item before alteration.
    value
        The value of the item before alteration.
    content
        The value to unpack in place of the alteration key.

    """

    key: str
    value: object
    content: Data

FileSystemAwareConfigProcessor

Bases: ConfigProcessor

Config processor that is aware of the file system.

Can handle requests for transcluding other configuration files, making configurations extensible.

Source code in configzen/processor.py
class FileSystemAwareConfigProcessor(ConfigProcessor):
    """
    Config processor that is aware of the file system.

    Can handle requests for transcluding other configuration files
    to achieve a sense of extendability.
    """

    @macro
    def extend(self, sources: str | dict[str, str]) -> Data:
        """Transclude a config in this config."""
        if isinstance(sources, str):
            source = get_config_source(sources)
            return source.load()
        return {
            key: get_config_source(source).load() for key, source in sources.items()
        }

extend(sources: str | dict[str, str]) -> Data

Transclude a config in this config.

Source code in configzen/processor.py
@macro
def extend(self, sources: str | dict[str, str]) -> Data:
    """Transclude a config in this config."""
    if isinstance(sources, str):
        source = get_config_source(sources)
        return source.load()
    return {
        key: get_config_source(source).load() for key, source in sources.items()
    }
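A hedged sketch of how the extend macro could be invoked from raw configuration data. It assumes macro keys are spelled as the macro prefix (default "^") followed by the macro name, and that base.yml exists alongside the loading code; the file name and data are illustrative.

from configzen.processor import FileSystemAwareConfigProcessor

# "^extend" asks the processor to transclude another configuration file
# (base.yml, hypothetical) before the config model sees the data.
raw = {
    "^extend": "base.yml",
    "app": {"debug": True},
}

processor = FileSystemAwareConfigProcessor(raw)
processed = processor.get_processed_data()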

routes

Routes creation and parsing.

Step

Bases: Generic[_KT]

A configuration route step.

Do not use this class directly. Use GetAttr or GetItem instead.

Source code in configzen/routes.py
class Step(Generic[_KT]):
    """
    A configuration route step.

    Do not use this class directly. Use GetAttr or GetItem instead.
    """

    key: _KT

    def __init__(self, key: _KT, /) -> None:
        self.key = key

    def __eq__(self, other: object) -> bool:
        """Compare this step to another step."""
        if isinstance(other, Step):
            return (
                issubclass(type(other), type(self))
                or issubclass(type(self), type(other))
            ) or self.key == other.key
        return NotImplemented

    def get(self, _: Any, /) -> object:
        """Perform a get operation."""
        raise NotImplementedError

    def set(self, _: Any, __: object, /) -> None:
        """Perform a set operation."""
        raise NotImplementedError

    def __call__(self, obj: Any, /) -> object:
        """Perform a get operation."""
        return self.get(obj)

    def __repr__(self) -> str:
        """Represent this step in a string."""
        return f"{type(self).__name__}({self.key!r})"

__eq__(other: object) -> bool

Compare this step to another step.

Source code in configzen/routes.py
def __eq__(self, other: object) -> bool:
    """Compare this step to another step."""
    if isinstance(other, Step):
        return (
            issubclass(type(other), type(self))
            or issubclass(type(self), type(other))
        ) or self.key == other.key
    return NotImplemented

get(_: Any) -> object

Perform a get operation.

Source code in configzen/routes.py
def get(self, _: Any, /) -> object:
    """Perform a get operation."""
    raise NotImplementedError

set(_: Any, __: object) -> None

Perform a set operation.

Source code in configzen/routes.py
def set(self, _: Any, __: object, /) -> None:
    """Perform a set operation."""
    raise NotImplementedError

__call__(obj: Any) -> object

Perform a get operation.

Source code in configzen/routes.py
def __call__(self, obj: Any, /) -> object:
    """Perform a get operation."""
    return self.get(obj)

__repr__() -> str

Represent this step in a string.

Source code in configzen/routes.py
def __repr__(self) -> str:
    """Represent this step in a string."""
    return f"{type(self).__name__}({self.key!r})"

GetAttr

Bases: Step[str]

A route step that gets an attribute from an object.

The argument is used as an attribute name.

Source code in configzen/routes.py
class GetAttr(Step[str]):
    """
    A route step that gets an attribute from an object.

    The argument is used as an attribute name.
    """

    def get(self, target: Any, /) -> object:
        """Get an attribute from an object."""
        return getattr(target, self.key)

    def set(self, target: Any, value: object, /) -> None:
        """Set an attribute in an object."""
        setattr(target, self.key, value)

    def __str__(self) -> str:
        """Compose this step into a string."""
        return str(self.key).replace(Route.TOKEN_DOT, r"\.")

get(target: Any) -> object

Get an attribute from an object.

Source code in configzen/routes.py
def get(self, target: Any, /) -> object:
    """Get an attribute from an object."""
    return getattr(target, self.key)

set(target: Any, value: object) -> None

Set an attribute in an object.

Source code in configzen/routes.py
def set(self, target: Any, value: object, /) -> None:
    """Set an attribute in an object."""
    setattr(target, self.key, value)

__str__() -> str

Compose this step into a string.

Source code in configzen/routes.py
def __str__(self) -> str:
    """Compose this step into a string."""
    return str(self.key).replace(Route.TOKEN_DOT, r"\.")

GetItem

Bases: Step[Union[int, str]]

A route step that gets an item from an object.

If the argument is a string, it is checked for being a digit. Unless explicitly escaped, a digit string is cast to an integer; otherwise, the key is used as is.

Source code in configzen/routes.py
class GetItem(Step[Union[int, str]]):
    r"""
    A route step that gets an item from an object.

    If the argument is a string, it is used checked for being a digit.
    Unless explicitly escaped, if it is a digit, it is casted to an integer.
    Otherwise, it is used as is.
    """

    def __init__(self, key: int | str, /, *, ignore_digit: bool = False) -> None:
        self.escape = False
        if isinstance(key, str) and key.isdigit():
            if ignore_digit:
                self.escape = True
            else:
                key = int(key)
        super().__init__(key)

    def get(self, target: Any, /) -> object:
        """Get an item from an object."""
        return target[self.key]

    def set(self, target: Any, value: object, /) -> None:
        """Set an item in an object."""
        target[self.key] = value

    def __str__(self) -> str:
        """Compose this step into a string."""
        argument = str(self.key)
        if self.escape:
            argument = Route.TOKEN_ESCAPE + argument
        return argument.join(
            Route.TOKEN_ENTER + Route.TOKEN_LEAVE,
        ).replace(Route.TOKEN_DOT, r"\.")

get(target: Any) -> object

Get an item from an object.

Source code in configzen/routes.py
def get(self, target: Any, /) -> object:
    """Get an item from an object."""
    return target[self.key]

set(target: Any, value: object) -> None

Set an item in an object.

Source code in configzen/routes.py
def set(self, target: Any, value: object, /) -> None:
    """Set an item in an object."""
    target[self.key] = value

__str__() -> str

Compose this step into a string.

Source code in configzen/routes.py
def __str__(self) -> str:
    """Compose this step into a string."""
    argument = str(self.key)
    if self.escape:
        argument = Route.TOKEN_ESCAPE + argument
    return argument.join(
        Route.TOKEN_ENTER + Route.TOKEN_LEAVE,
    ).replace(Route.TOKEN_DOT, r"\.")
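A small, self-contained sketch of using the two step types directly; GetAttr and GetItem come straight from this module, while the Server container is illustrative.

from dataclasses import dataclass

from configzen.routes import GetAttr, GetItem


@dataclass
class Server:
    host: str = "localhost"


servers = {"primary": Server()}

# GetItem subscripts a container, GetAttr reads an attribute.
item_step = GetItem("primary")
attr_step = GetAttr("host")
print(attr_step(item_step(servers)))        # localhost

attr_step.set(item_step.get(servers), "db.internal")
print(servers["primary"].host)              # db.internal

# A digit string is cast to an integer unless explicitly escaped.
print(GetItem("0"), GetItem("0", ignore_digit=True))  # [0] [\0]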

Route

Routes are lists of steps that are used to access values in a configuration.

Each step is either a key or an index.

A route can be created from a string, a list of steps, or another route.

Examples:

>>> route = Route("a.b.c")
>>> route
<Route 'a.b.c'>
>>> route.steps
[GetAttr('a'), GetAttr('b'), GetAttr('c')]

Parameters:

    route (RouteLike): A route to parse. Required.
    allow_empty (bool): Whether to allow empty routes. Default: False.
Source code in configzen/routes.py
class Route:
    r"""
    Routes are, lists of steps that are used to access values in a configuration.

    Each step is either a key or an index.

    A route can be created from a string, a list of steps, or another route.

    Examples
    --------
    >>> route = Route("a.b.c")
    >>> route
    <Route 'a.b.c'>
    >>> route.steps
    [GetAttr('a'), GetAttr('b'), GetAttr('c')]

    Parameters
    ----------
    route
        A route to parse.
    allow_empty
        Whether to allow empty routes.

    """

    TOKEN_DOT: ClassVar[str] = "."
    TOKEN_ESCAPE: ClassVar[str] = "\\"
    TOKEN_ENTER: ClassVar[str] = "["
    TOKEN_LEAVE: ClassVar[str] = "]"

    TOKENS: ClassVar[tuple[str, str, str, str]] = (
        TOKEN_DOT,
        TOKEN_ESCAPE,
        TOKEN_ENTER,
        TOKEN_LEAVE,
    )

    def __init__(
        self,
        route: RouteLike,
        *,
        allow_empty: bool = False,
    ) -> None:
        steps = self.parse(route)
        if not (allow_empty or steps):
            msg = "Empty configuration route"
            raise ValueError(msg)
        self.__steps = tuple(steps)

    @property
    def steps(self) -> list[Step[Any]]:
        """Get all steps in this route."""
        return list(self.__steps)

    def __hash__(self) -> int:
        """Get a hash of this route."""
        return hash(self.__steps)

    @classmethod
    def parse(cls, route: RouteLike) -> list[Step[Any]]:
        """
        Parse a route into steps.

        Parameters
        ----------
        route
            The route to parse.

        Returns
        -------
        List of steps.

        """
        if isinstance(route, Step):
            return [route]
        if isinstance(route, Route):
            return route.steps
        if isinstance(route, (tuple, list)):
            patched_route: list[Step[Any]] = []
            for element in route:
                if isinstance(element, (str, int)):
                    try:
                        patched_element = next(iter(cls.parse(element)))
                    except StopIteration:
                        continue
                else:
                    patched_element = element
                patched_route.append(patched_element)
            return patched_route
        if isinstance(route, int):
            return [GetItem(route)]
        if isinstance(route, str):
            return cls.decompose(route)
        msg = f"Invalid route type {type(route)!r}"
        raise TypeError(msg)

    @classmethod
    def decompose(cls, route: str) -> list[Step[Any]]:
        """
        Decompose a route into a list of steps.

        Parameters
        ----------
        route
            A route to decompose.

        Returns
        -------
        List of steps.

        """
        if not route:
            return []

        dot, escape, enter, leave = cls.TOKENS

        return _route_decompose(
            route,
            dot=dot,
            escape=escape,
            enter=enter,
            leave=leave,
        )

    def compose(self) -> str:
        """Compose this route into a string."""
        composed = ""
        steps = self.__steps
        for index, step in enumerate(steps):
            composed += str(step)
            if index < len(steps) - 1:
                ahead = steps[index + 1]
                if isinstance(ahead, GetAttr):
                    composed += self.TOKEN_DOT
        return composed

    def enter(self, subroute: RouteLike) -> Route:
        """
        Enter a subroute.

        Parameters
        ----------
        subroute
            A subroute to enter.

        """
        return type(self)(self.steps + self.parse(subroute))

    def get(self, obj: Any, /) -> object:
        """
        Get an object at the end of this route.

        Parameters
        ----------
        obj
            An object to dive in.

        Returns
        -------
        The result of visiting the object.

        """
        return reduce(lambda obj, step: step(obj), self.__steps, obj)

    def set(self, obj: Any, value: object, /) -> None:
        """
        Set an object pointed to by this route.

        Parameters
        ----------
        obj
            An object to dive in.

        value
            A value to set.

        Returns
        -------
        The result of visiting the object.

        """
        steps = self.steps
        last_step = steps.pop()
        last_step.set(
            reduce(lambda obj, step: step(obj), steps, obj),
            value,
        )

    def __eq__(self, other: object) -> bool:
        """
        Compare this route to another route.

        Parameters
        ----------
        other
            Another route to compare to.

        """
        if isinstance(other, Route):
            return self.steps == other.steps
        if isinstance(other, str):
            return self.steps == self.decompose(other)
        if isinstance(other, (tuple, list)):
            return self.steps == self.parse(other)
        return NotImplemented

    def __str__(self) -> str:
        """Compose this route into a string."""
        return self.compose()

    def __iter__(self) -> Iterator[Step[Any]]:
        """Yield all steps in this route."""
        yield from self.__steps

    def __repr__(self) -> str:
        """Represent this route in a string."""
        return f"<{type(self).__name__} {self.compose()!r}>"

steps: list[Step[Any]] property

Get all steps in this route.

__hash__() -> int

Get a hash of this route.

Source code in configzen/routes.py, lines 295-297
def __hash__(self) -> int:
    """Get a hash of this route."""
    return hash(self.__steps)

parse(route: RouteLike) -> list[Step[Any]] classmethod

Parse a route into steps.

Parameters:

Name   Type       Description          Default
route  RouteLike  The route to parse.  required

Returns:

Type             Description
list[Step[Any]]  List of steps.
Source code in configzen/routes.py, lines 299-335
@classmethod
def parse(cls, route: RouteLike) -> list[Step[Any]]:
    """
    Parse a route into steps.

    Parameters
    ----------
    route
        The route to parse.

    Returns
    -------
    List of steps.

    """
    if isinstance(route, Step):
        return [route]
    if isinstance(route, Route):
        return route.steps
    if isinstance(route, (tuple, list)):
        patched_route: list[Step[Any]] = []
        for element in route:
            if isinstance(element, (str, int)):
                try:
                    patched_element = next(iter(cls.parse(element)))
                except StopIteration:
                    continue
            else:
                patched_element = element
            patched_route.append(patched_element)
        return patched_route
    if isinstance(route, int):
        return [GetItem(route)]
    if isinstance(route, str):
        return cls.decompose(route)
    msg = f"Invalid route type {type(route)!r}"
    raise TypeError(msg)

decompose(route: str) -> list[Step[Any]] classmethod

Decompose a route into a list of steps.

Parameters:

Name   Type  Description            Default
route  str   A route to decompose.  required

Returns:

Type             Description
list[Step[Any]]  List of steps.
Source code in configzen/routes.py, lines 337-363
@classmethod
def decompose(cls, route: str) -> list[Step[Any]]:
    """
    Decompose a route into a list of steps.

    Parameters
    ----------
    route
        A route to decompose.

    Returns
    -------
    List of steps.

    """
    if not route:
        return []

    dot, escape, enter, leave = cls.TOKENS

    return _route_decompose(
        route,
        dot=dot,
        escape=escape,
        enter=enter,
        leave=leave,
    )

compose() -> str

Compose this route into a string.

Source code in configzen/routes.py, lines 365-375
def compose(self) -> str:
    """Compose this route into a string."""
    composed = ""
    steps = self.__steps
    for index, step in enumerate(steps):
        composed += str(step)
        if index < len(steps) - 1:
            ahead = steps[index + 1]
            if isinstance(ahead, GetAttr):
                composed += self.TOKEN_DOT
    return composed

enter(subroute: RouteLike) -> Route

Enter a subroute.

Parameters:

Name      Type       Description           Default
subroute  RouteLike  A subroute to enter.  required
Source code in configzen/routes.py, lines 377-387
def enter(self, subroute: RouteLike) -> Route:
    """
    Enter a subroute.

    Parameters
    ----------
    subroute
        A subroute to enter.

    """
    return type(self)(self.steps + self.parse(subroute))

get(obj: Any) -> object

Get an object at the end of this route.

Parameters:

Name  Type  Description            Default
obj   Any   An object to dive in.  required

Returns:

Type    Description
object  The result of visiting the object.
Source code in configzen/routes.py, lines 389-403
def get(self, obj: Any, /) -> object:
    """
    Get an object at the end of this route.

    Parameters
    ----------
    obj
        An object to dive in.

    Returns
    -------
    The result of visiting the object.

    """
    return reduce(lambda obj, step: step(obj), self.__steps, obj)

set(obj: Any, value: object) -> None

Set an object pointed to by this route.

Parameters:

Name   Type    Description            Default
obj    Any     An object to dive in.  required
value  object  A value to set.        required

Returns:

Type Description
The result of visiting the object.
Source code in configzen/routes.py, lines 405-427
def set(self, obj: Any, value: object, /) -> None:
    """
    Set an object pointed to by this route.

    Parameters
    ----------
    obj
        An object to dive in.

    value
        A value to set.

    Returns
    -------
    The result of visiting the object.

    """
    steps = self.steps
    last_step = steps.pop()
    last_step.set(
        reduce(lambda obj, step: step(obj), steps, obj),
        value,
    )
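
A minimal usage sketch of get and set. It sticks to integer indices so every step is a GetItem; the assumption that a GetItem step reads via plain item access mirrors the item assignment visible in Step.set above.

from configzen.routes import Route

matrix = [[1, 2], [3, 4]]
route = Route([0, 1])      # parses to [GetItem(0), GetItem(1)]

print(route.get(matrix))   # 2 -- walks matrix[0][1] one step at a time
route.set(matrix, 99)      # dives to matrix[0], then assigns index 1
print(matrix)              # [[1, 99], [3, 4]]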

__eq__(other: object) -> bool

Compare this route to another route.

Parameters:

Name   Type    Description                   Default
other  object  Another route to compare to.  required
Source code in configzen/routes.py, lines 429-445
def __eq__(self, other: object) -> bool:
    """
    Compare this route to another route.

    Parameters
    ----------
    other
        Another route to compare to.

    """
    if isinstance(other, Route):
        return self.steps == other.steps
    if isinstance(other, str):
        return self.steps == self.decompose(other)
    if isinstance(other, (tuple, list)):
        return self.steps == self.parse(other)
    return NotImplemented

__str__() -> str

Compose this route into a string.

Source code in configzen/routes.py, lines 447-449
def __str__(self) -> str:
    """Compose this route into a string."""
    return self.compose()

__iter__() -> Iterator[Step[Any]]

Yield all steps in this route.

Source code in configzen/routes.py, lines 451-453
def __iter__(self) -> Iterator[Step[Any]]:
    """Yield all steps in this route."""
    yield from self.__steps

__repr__() -> str

Represent this route in a string.

Source code in configzen/routes.py, lines 455-457
def __repr__(self) -> str:
    """Represent this route in a string."""
    return f"<{type(self).__name__} {self.compose()!r}>"

advance_linked_route(_current_head: Type[object], _annotation: Any, _step: Step[object]) -> Any

Move one step forward in a linked route.

Source code in configzen/routes.py, lines 463-470
@class_singledispatch
def advance_linked_route(
    _current_head: Type[object],  # noqa: UP006
    _annotation: Any,
    _step: Step[object],
) -> Any:
    """Move one step forward in a linked route."""
    return _AnyHead

sources

Sources and destinations that hold the configuration data.

ConfigSource

Bases: Generic[SourceType, AnyStr]

Core interface for loading and saving configuration data.

If you need to implement your own configuration source class, implement a subclass of this class and pass it to the .config_load() method of your configuration or its model_config.
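
For orientation, a rough, untested sketch of what a minimal custom source might look like. The HTTP transport, the hypothetical URL handling, and the "json" format name are illustrative assumptions rather than part of configzen; the only requirement taken from this page is implementing the four abstract methods.

from urllib.request import urlopen

from configzen.sources import ConfigSource


class HTTPConfigSource(ConfigSource[str, str]):
    """Read-only source fetching configuration over HTTP (illustrative sketch)."""

    def __init__(self, source: str, data_format: str = "json") -> None:
        # Pass an explicit format name so the base class does not try to guess one.
        super().__init__(source, data_format)

    def load(self):
        # Fetch the payload and let the configured data format parse it from
        # a temporary text stream, mirroring what FileConfigSource.load() does.
        with urlopen(self.source) as response:
            text = response.read().decode("utf-8")
        return self.data_format.load(self._temp_stream_factory(text))

    async def load_async(self):
        return self.load()  # no real async I/O in this sketch

    def dump(self, data) -> None:
        msg = "this sketch is read-only"
        raise NotImplementedError(msg)

    async def dump_async(self, data) -> int:
        msg = "this sketch is read-only"
        raise NotImplementedError(msg)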

Source code in configzen/sources.py, lines 60-162
@runtime_generic
class ConfigSource(Generic[SourceType, AnyStr], metaclass=ABCMeta):
    """
    Core interface for loading and saving configuration data.

    If you need to implement your own configuration source class,
    implement a subclass of this class and pass it to the `.config_load()` method
    of your configuration or its model_config.
    """

    # Set up temporary stream factories
    _binary_stream_factory: ClassVar[Callable[..., IO[bytes]]] = BytesIO
    _string_stream_factory: ClassVar[Callable[..., IO[str]]] = StringIO
    _data_format: DataFormat[Any, AnyStr]
    source: SourceType
    options: FormatOptions

    def __init__(
        self,
        source: SourceType,
        data_format: str | DataFormat[Any, AnyStr] | None = None,
        **options: Unpack[FormatOptions],
    ) -> None:
        self._temp_stream_factory: Callable[..., IO[AnyStr]] = (
            self._binary_stream_factory
            if self.is_binary()
            else self._string_stream_factory
        )
        self.source = source
        self.options = options
        self.data_format = data_format  # type: ignore[assignment]

    @property
    def data_format(self) -> DataFormat[Any, AnyStr]:
        """The current data format for a configuration source."""
        return self._data_format

    @data_format.setter
    def data_format(self, data_format: str | DataFormat[Any, AnyStr] | None) -> None:
        if data_format is None:
            data_format = self._guess_data_format()
        else:
            data_format = self._make_data_format(data_format)
        data_format.validate_source(self)
        self._data_format = data_format

    def _guess_data_format(self) -> DataFormat[Any, AnyStr]:
        msg = "Cannot guess the data format of the configuration source"
        raise NotImplementedError(msg)

    def _make_data_format(
        self,
        data_format: str | DataFormat[Any, AnyStr],
    ) -> DataFormat[Any, AnyStr]:
        if isinstance(data_format, str):
            return DataFormat.for_extension(
                data_format,
                self.options.get(data_format),  # type: ignore[arg-type]
            )
        data_format.configure(**self.options)  # type: ignore[misc]
        return data_format

    if TYPE_CHECKING:
        # python/mypy#9937
        @overload
        def is_binary(self: ConfigSource[SourceType, str]) -> Literal[False]: ...

        @overload
        def is_binary(
            self: ConfigSource[SourceType, bytes],
        ) -> Literal[True]: ...

    def is_binary(self: ConfigSource[SourceType, AnyStr]) -> bool:
        """Determine whether the configuration source is binary."""
        return not type_check(self, ConfigSource[Any, str])

    @abstractmethod
    def load(self) -> Data:
        """
        Load the configuration source.

        Return its contents as a dictionary.
        """
        raise NotImplementedError

    @abstractmethod
    async def load_async(self) -> Data:
        """
        Load the configuration source asynchronously.

        Return its contents as a dictionary.
        """
        raise NotImplementedError

    @abstractmethod
    def dump(self, data: Data) -> None:
        """Dump the configuration source."""
        raise NotImplementedError

    @abstractmethod
    async def dump_async(self, data: Data) -> int:
        """Dump the configuration source asynchronously."""
        raise NotImplementedError

data_format: DataFormat[Any, AnyStr] property writable

The current data format for a configuration source.

is_binary() -> bool

Determine whether the configuration source is binary.

Source code in configzen/sources.py, lines 132-134
def is_binary(self: ConfigSource[SourceType, AnyStr]) -> bool:
    """Determine whether the configuration source is binary."""
    return not type_check(self, ConfigSource[Any, str])

load() -> Data abstractmethod

Load the configuration source.

Return its contents as a dictionary.

Source code in configzen/sources.py, lines 136-143
@abstractmethod
def load(self) -> Data:
    """
    Load the configuration source.

    Return its contents as a dictionary.
    """
    raise NotImplementedError

load_async() -> Data abstractmethod async

Load the configuration source asynchronously.

Return its contents as a dictionary.

Source code in configzen/sources.py, lines 145-152
@abstractmethod
async def load_async(self) -> Data:
    """
    Load the configuration source asynchronously.

    Return its contents as a dictionary.
    """
    raise NotImplementedError

dump(data: Data) -> None abstractmethod

Dump the configuration source.

Source code in configzen/sources.py, lines 154-157
@abstractmethod
def dump(self, data: Data) -> None:
    """Dump the configuration source."""
    raise NotImplementedError

dump_async(data: Data) -> int abstractmethod async

Dump the configuration source asynchronously.

Source code in configzen/sources.py, lines 159-162
@abstractmethod
async def dump_async(self, data: Data) -> int:
    """Dump the configuration source asynchronously."""
    raise NotImplementedError

StreamConfigSource

Bases: Generic[AnyStr], ConfigSource[IO[Any], Any]

A configuration source that is a stream.

Parameters:

Name    Type        Description                               Default
source  IO[AnyStr]  The stream to the configuration source.   required
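
A short usage sketch; it assumes a "json" data format is registered under that name, which this page does not show.

from io import StringIO

from configzen.sources import StreamConfigSource

# Streams carry no file extension, so the data format must be given explicitly.
source = StreamConfigSource(StringIO('{"app": {"debug": true}}'), data_format="json")
print(source.load())   # {'app': {'debug': True}}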
Source code in configzen/sources.py, lines 189-232
@runtime_generic
class StreamConfigSource(
    Generic[AnyStr],
    ConfigSource[IO[Any], Any],
):
    """
    A configuration source that is a stream.

    Parameters
    ----------
    source
        The stream to the configuration source.

    """

    def __init__(
        self,
        source: IO[AnyStr],
        data_format: str | DataFormat[Any, AnyStr],
        **options: Unpack[FormatOptions],
    ) -> None:
        super().__init__(source, data_format=data_format, **options)

    def load(self) -> Data:
        """
        Load the configuration source.

        Return its contents as a dictionary.
        """
        return self.data_format.load(self.source)

    def load_async(self) -> Never:
        """Unsupported."""
        msg = "async streams are not supported for `StreamConfigSource`"
        raise NotImplementedError(msg)

    def dump(self, data: Data) -> None:
        """Dump the configuration source."""
        self.data_format.dump(data, self.source)

    def dump_async(self, _data: Data) -> Never:
        """Unsupported."""
        msg = "async streams are not supported for `StreamConfigSource`"
        raise NotImplementedError(msg)

load() -> Data

Load the configuration source.

Return its contents as a dictionary.

Source code in configzen/sources.py, lines 212-218
def load(self) -> Data:
    """
    Load the configuration source.

    Return its contents as a dictionary.
    """
    return self.data_format.load(self.source)

load_async() -> Never

Unsupported.

Source code in configzen/sources.py, lines 220-223
def load_async(self) -> Never:
    """Unsupported."""
    msg = "async streams are not supported for `StreamConfigSource`"
    raise NotImplementedError(msg)

dump(data: Data) -> None

Dump the configuration source.

Source code in configzen/sources.py, lines 225-227
def dump(self, data: Data) -> None:
    """Dump the configuration source."""
    self.data_format.dump(data, self.source)

dump_async(_data: Data) -> Never

Unsupported.

Source code in configzen/sources.py, lines 229-232
def dump_async(self, _data: Data) -> Never:
    """Unsupported."""
    msg = "async streams are not supported for `StreamConfigSource`"
    raise NotImplementedError(msg)

FileConfigSource

Bases: Generic[AnyStr], ConfigSource[Path, AnyStr]

A configuration source that is a file.

Parameters:

Name    Type                                           Description                                  Default
source  str | bytes | PathLike[str] | PathLike[bytes]  The path to the configuration source file.   required
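
A short usage sketch; the file name is hypothetical and the data format is guessed from the ".json" suffix, assuming that extension is registered with DataFormat.

from configzen.sources import FileConfigSource

source = FileConfigSource("app.json")   # data format inferred from the suffix
data = source.load()                    # parsed into a dictionary
data["debug"] = True
source.dump(data)                       # serialized back through a temporary stream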
Source code in configzen/sources.py, lines 245-419
@runtime_generic
class FileConfigSource(
    Generic[AnyStr],
    ConfigSource[Path, AnyStr],
):
    """
    A configuration source that is a file.

    Parameters
    ----------
    source
        The path to the configuration source file.

    """

    def __init__(
        self,
        source: str | bytes | PathLike[str] | PathLike[bytes],
        data_format: str | DataFormat[Any, Any] | None = None,
        *,
        use_processing_trace: bool = True,
        **options: Unpack[FormatOptions],
    ) -> None:
        super().__init__(_make_path(source), data_format=data_format, **options)
        self._use_processing_trace = use_processing_trace

    @property
    def paths(self) -> list[Path]:
        """List possible path variants basing on the processing context trace."""
        from configzen.config import processing

        if (
            not self.source.is_absolute()
            and self._use_processing_trace
            and (processing_context := processing.get())
        ):
            return [
                _make_path(source).parent / self.source
                for config_source in processing_context.trace
                if isinstance(source := config_source.source, (str, bytes, PathLike))
            ]
        return [self.source]  # in current working dir

    def _guess_data_format(self) -> DataFormat[Any, AnyStr]:
        suffix = self.source.suffix
        if suffix:
            extension = suffix.replace(".", "", 1)
            from configzen.data import DataFormat

            data_format_class = DataFormat.extension_registry.get(extension)
            if data_format_class is not None:
                return data_format_class(
                    self.options.get(data_format_class.option_name) or {},
                )
        msg = (
            f"Cannot guess the data format of the configuration source "
            f"with extension {suffix!r}"
        )
        raise NotImplementedError(msg)

    def load(self) -> Data:
        """
        Load the configuration source file and return its contents as a dictionary.

        Parameters
        ----------
        data_format
            The data format to use when loading the data.

        """
        return self.data_format.load(self._temp_stream_factory(self.read()))

    async def load_async(self) -> Data:
        """
        Load the configuration source file asynchronously.

        Return its contents as a dictionary.

        Parameters
        ----------
        data_format
            The data format to use when loading the data.

        """
        return self.data_format.load(self._temp_stream_factory(await self.read_async()))

    def dump(self, data: Data) -> None:
        """
        Dump the configuration data to the source file.

        Parameters
        ----------
        data
            The data to dump to the configuration source.

        """
        temp_stream = self._temp_stream_factory()
        self.data_format.dump(data, temp_stream)
        temp_stream.seek(0)
        self.write(temp_stream.read())

    async def dump_async(self, data: Data) -> int:
        """
        Load the configuration source file asynchronously.

        Return its contents as a dictionary.

        Parameters
        ----------
        data
            The data to dump to the configuration source.
        data_format
            The data format to use when dumping the data.

        """
        temp_stream = self._temp_stream_factory()
        self.data_format.dump(data, temp_stream)
        temp_stream.seek(0)
        return await self.write_async(temp_stream.read())

    def read(self) -> AnyStr:
        """Read the configuration source and return its contents."""
        errors = []
        reader = Path.read_bytes if self.is_binary() else Path.read_text
        for path in self.paths:
            try:
                return reader(path)
            except FileNotFoundError as e:  # noqa: PERF203
                errors.append(e)
                continue
        raise FileNotFoundError(errors)

    async def read_async(self) -> AnyStr:
        """Read the configuration source file asynchronously and return its contents."""
        errors = []
        reader = AsyncPath.read_bytes if self.is_binary() else AsyncPath.read_text
        for path in map(AsyncPath, self.paths):
            try:
                return await reader(path)
            except FileNotFoundError as e:  # noqa: PERF203
                errors.append(e)
                continue
        raise FileNotFoundError(errors)

    def write(self, content: AnyStr) -> int:
        """
        Write the configuration source file and return the number of bytes written.

        Parameters
        ----------
        content
            The content to write to the configuration source.

        """
        if self.is_binary():
            return self.source.write_bytes(content)
        return self.source.write_text(content)

    async def write_async(self, content: AnyStr) -> int:
        """
        Write the configuration source file asynchronously.

        Return the number of bytes written.

        Parameters
        ----------
        content
            The content to write to the configuration source.

        """
        if self.is_binary():
            return await AsyncPath(self.source).write_bytes(content)
        return await AsyncPath(self.source).write_text(content)

paths: list[Path] property

List possible path variants based on the processing context trace.

load() -> Data

Load the configuration source file and return its contents as a dictionary.

Source code in configzen/sources.py, lines 305-315
def load(self) -> Data:
    """
    Load the configuration source file and return its contents as a dictionary.

    Parameters
    ----------
    data_format
        The data format to use when loading the data.

    """
    return self.data_format.load(self._temp_stream_factory(self.read()))

load_async() -> Data async

Load the configuration source file asynchronously.

Return its contents as a dictionary.

Source code in configzen/sources.py, lines 317-329
async def load_async(self) -> Data:
    """
    Load the configuration source file asynchronously.

    Return its contents as a dictionary.

    Parameters
    ----------
    data_format
        The data format to use when loading the data.

    """
    return self.data_format.load(self._temp_stream_factory(await self.read_async()))

dump(data: Data) -> None

Dump the configuration data to the source file.

Parameters:

Name  Type  Description                                     Default
data  Data  The data to dump to the configuration source.   required
Source code in configzen/sources.py, lines 331-346
def dump(self, data: Data) -> None:
    """
    Dump the configuration data to the source file.

    Parameters
    ----------
    data
        The data to dump to the configuration source.

    """
    temp_stream = self._temp_stream_factory()
    self.data_format.dump(data, temp_stream)
    temp_stream.seek(0)
    self.write(temp_stream.read())

dump_async(data: Data) -> int async

Dump the configuration data to the source file asynchronously.

Return the number of bytes written.

Parameters:

Name  Type  Description                                     Default
data  Data  The data to dump to the configuration source.   required
Source code in configzen/sources.py, lines 348-365
async def dump_async(self, data: Data) -> int:
    """
    Load the configuration source file asynchronously.

    Return its contents as a dictionary.

    Parameters
    ----------
    data
        The data to dump to the configuration source.
    data_format
        The data format to use when dumping the data.

    """
    temp_stream = self._temp_stream_factory()
    self.data_format.dump(data, temp_stream)
    temp_stream.seek(0)
    return await self.write_async(temp_stream.read())

read() -> AnyStr

Read the configuration source and return its contents.

Source code in configzen/sources.py, lines 367-377
def read(self) -> AnyStr:
    """Read the configuration source and return its contents."""
    errors = []
    reader = Path.read_bytes if self.is_binary() else Path.read_text
    for path in self.paths:
        try:
            return reader(path)
        except FileNotFoundError as e:  # noqa: PERF203
            errors.append(e)
            continue
    raise FileNotFoundError(errors)

read_async() -> AnyStr async

Read the configuration source file asynchronously and return its contents.

Source code in configzen/sources.py, lines 379-389
async def read_async(self) -> AnyStr:
    """Read the configuration source file asynchronously and return its contents."""
    errors = []
    reader = AsyncPath.read_bytes if self.is_binary() else AsyncPath.read_text
    for path in map(AsyncPath, self.paths):
        try:
            return await reader(path)
        except FileNotFoundError as e:  # noqa: PERF203
            errors.append(e)
            continue
    raise FileNotFoundError(errors)

write(content: AnyStr) -> int

Write the configuration source file and return the number of bytes written.

Parameters:

Name     Type    Description                                         Default
content  AnyStr  The content to write to the configuration source.   required
Source code in configzen/sources.py, lines 391-403
def write(self, content: AnyStr) -> int:
    """
    Write the configuration source file and return the number of bytes written.

    Parameters
    ----------
    content
        The content to write to the configuration source.

    """
    if self.is_binary():
        return self.source.write_bytes(content)
    return self.source.write_text(content)

write_async(content: AnyStr) -> int async

Write the configuration source file asynchronously.

Return the number of bytes written.

Parameters:

Name     Type    Description                                         Default
content  AnyStr  The content to write to the configuration source.   required
Source code in configzen/sources.py, lines 405-419
async def write_async(self, content: AnyStr) -> int:
    """
    Write the configuration source file asynchronously.

    Return the number of bytes written.

    Parameters
    ----------
    content
        The content to write to the configuration source.

    """
    if self.is_binary():
        return await AsyncPath(self.source).write_bytes(content)
    return await AsyncPath(self.source).write_text(content)

get_config_source(source: object, _data_format: DataFormat[Any, AnyStr] | None = None) -> ConfigSource[Any, Any]

Get a dedicated interface for a configuration source.

Source code in configzen/sources.py, lines 165-176
@singledispatch
def get_config_source(
    source: object,
    _data_format: DataFormat[Any, AnyStr] | None = None,
) -> ConfigSource[Any, Any]:
    """Get a dedicated interface for a configuration source."""
    type_name = type(source).__name__
    msg = (
        f"There is no class operating on {type_name!r} configuration "
        f"sources. Implement it by creating a subclass of ConfigSource."
    )
    raise NotImplementedError(msg)

get_stream_config_source(source: IO[bytes] | IO[str], data_format: DataFormat[Any, Any]) -> StreamConfigSource[str] | StreamConfigSource[bytes]

Get a dedicated interface for a configuration source stream.

Source code in configzen/sources.py, lines 235-242
@get_config_source.register(BytesIO)
@get_config_source.register(StringIO)
def get_stream_config_source(
    source: IO[bytes] | IO[str],
    data_format: DataFormat[Any, Any],
) -> StreamConfigSource[str] | StreamConfigSource[bytes]:
    """Get a dedicated interface for a configuration source stream."""
    return StreamConfigSource(source, data_format=data_format)

get_file_config_source(source: str | bytes | PathLike[str] | PathLike[bytes], data_format: DataFormat[Any, AnyStr] | None = None) -> FileConfigSource[str] | FileConfigSource[bytes]

Get a dedicated interface for a configuration source file.

Source code in configzen/sources.py, lines 422-430
@get_config_source.register(str)
@get_config_source.register(bytes)
@get_config_source.register(PathLike)
def get_file_config_source(
    source: str | bytes | PathLike[str] | PathLike[bytes],
    data_format: DataFormat[Any, AnyStr] | None = None,
) -> FileConfigSource[str] | FileConfigSource[bytes]:
    """Get a dedicated interface for a configuration source file."""
    return FileConfigSource(source, data_format=data_format)
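
Taken together, these registrations dispatch on the type of the source; a small sketch (the file name and the "json" format are assumptions):

from io import StringIO

from configzen.sources import get_config_source

file_source = get_config_source("settings.json")            # -> FileConfigSource
stream_source = get_config_source(StringIO("{}"), "json")   # -> StreamConfigSource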

typedefs

Miscellaneous type definitions for configzen.