diff --git a/.buildinfo b/.buildinfo new file mode 100644 index 0000000..afc97b4 --- /dev/null +++ b/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file records the configuration used when building these files. When it is not found, a full rebuild will be done. +config: c5116bd8006bfc41e0314f4cbe68c127 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/_modules/index.html b/_modules/index.html new file mode 100644 index 0000000..d0adc01 --- /dev/null +++ b/_modules/index.html @@ -0,0 +1,128 @@ + + + + + + + + Overview: module code — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+ + +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/core/config.html b/_modules/vgslify/core/config.html new file mode 100644 index 0000000..2925fea --- /dev/null +++ b/_modules/vgslify/core/config.html @@ -0,0 +1,296 @@ + + + + + + + + vgslify.core.config — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.core.config

+# Imports
+
+# > Standard library
+from dataclasses import dataclass
+
+
+
+[docs] +@dataclass +class Conv2DConfig: + """ + Configuration for 2D Convolutional layer. + + Parameters + ---------- + activation : str + Activation function to use. + kernel_size : tuple + Size of the convolution kernels. + strides : tuple + Stride length of the convolution. + filters : int + Number of output filters in the convolution. + """ + activation: str + kernel_size: tuple + strides: tuple + filters: int
+ + + +
+[docs] +@dataclass +class Pooling2DConfig: + """ + Configuration for 2D Pooling layer. + + Parameters + ---------- + pool_type : str + Type of pooling operation (e.g., 'max', 'average'). + pool_size : tuple + Size of the pooling window. + strides : tuple + Stride length of the pooling operation. + """ + pool_type: str + pool_size: tuple + strides: tuple
+ + + +
+[docs] +@dataclass +class DenseConfig: + """ + Configuration for Dense (Fully Connected) layer. + + Parameters + ---------- + activation : str + Activation function to use. + units : int + Number of neurons in the dense layer. + """ + activation: str + units: int
+ + + +
+[docs] +@dataclass +class RNNConfig: + """ + Configuration for Recurrent Neural Network layer. + + Parameters + ---------- + units : int + Number of RNN units. + return_sequences : bool + Whether to return the last output or the full sequence. + go_backwards : bool + If True, process the input sequence backwards. + dropout : float + Fraction of the units to drop for the linear transformation of the inputs. + recurrent_dropout : float + Fraction of the units to drop for the linear transformation of the recurrent state. + rnn_type : str, optional + Type of RNN (e.g., 'simple', 'lstm', 'gru'). + bidirectional : bool, optional + If True, create a bidirectional RNN. + """ + units: int + return_sequences: bool + go_backwards: bool + dropout: float + recurrent_dropout: float + rnn_type: str = None + bidirectional: bool = False
+ + +
+[docs] +@dataclass +class DropoutConfig: + """ + Configuration for Dropout layer. + + Parameters + ---------- + rate : float + Fraction of the input units to drop. + """ + rate: float
+ + + +
+[docs] +@dataclass +class ReshapeConfig: + """ + Configuration for Reshape layer. + + Parameters + ---------- + target_shape : tuple + Target shape of the output. + """ + target_shape: tuple
+ + + +
+[docs] +@dataclass +class InputConfig: + """ + Configuration for Input layer. + + Parameters + ---------- + batch_size : int + Size of the batches of data. + depth : int + Depth of the input (for 3D inputs). + height : int + Height of the input. + width : int + Width of the input. + channels : int + Number of channels in the input. + """ + batch_size: int + depth: int + height: int + width: int + channels: int
+ + +
+[docs] +@dataclass +class ActivationConfig: + """ + Configuration for Activation layer. + + Parameters + ---------- + activation : str + Activation function to use. + """ + activation: str
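The dataclasses above are plain value objects with no behavior of their own; the spec parsers shown later in this section return them. A minimal sketch of constructing a few by hand (field names and the import path are taken from the source above):

```python
# Illustrative sketch: building the config dataclasses directly, mirroring
# what the VGSL spec parsers produce.
from vgslify.core.config import Conv2DConfig, DenseConfig, Pooling2DConfig

conv = Conv2DConfig(activation="relu", kernel_size=(3, 3), strides=(1, 1), filters=32)
pool = Pooling2DConfig(pool_type="max", pool_size=(2, 2), strides=(2, 2))
dense = DenseConfig(activation="softmax", units=10)

# Dataclasses provide a readable repr for free:
print(conv)
# Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=32)
```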
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/core/factory.html b/_modules/vgslify/core/factory.html new file mode 100644 index 0000000..ce5e2fe --- /dev/null +++ b/_modules/vgslify/core/factory.html @@ -0,0 +1,926 @@ + + + + + + + + vgslify.core.factory — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.core.factory

+# Imports
+
+# > Standard Libraries
+from abc import ABC, abstractmethod
+from typing import Any, Tuple
+import math
+
+# > Internal dependencies
+from vgslify.core.parser import (parse_dropout_spec, parse_activation_spec,
+                                 parse_reshape_spec, parse_conv2d_spec,
+                                 parse_pooling2d_spec, parse_dense_spec,
+                                 parse_rnn_spec, parse_input_spec)
+from vgslify.core.config import (Conv2DConfig, Pooling2DConfig, DenseConfig,
+                                 RNNConfig, DropoutConfig, ReshapeConfig,
+                                 InputConfig)
+
+
+
+[docs] +class LayerFactory(ABC): + """ + Abstract base class for creating neural network layers from VGSL specifications. + + This class defines the interface that must be implemented by concrete factories + for different frameworks (e.g., TensorFlow, PyTorch). It also provides common + methods for output shape calculations to be used by subclasses. + + Parameters + ---------- + input_shape : tuple of int, optional + The initial input shape for the model. + data_format : str, default 'channels_last' + The data format for the input tensor. Either 'channels_last' or 'channels_first'. + + Attributes + ---------- + layers : list + A list to store the created layers. + data_format : str + The data format for the input tensor. + shape : tuple of int + The current shape of the output tensor. + _input_shape : tuple of int + The initial input shape for the model. + + Notes + ----- + This is an abstract base class. Use a concrete implementation like + `TensorFlowLayerFactory` or `PyTorchLayerFactory` in your code. + + This class uses a naming convention where public methods for creating layers + (e.g., conv2d) have corresponding private methods with an underscore prefix + (e.g., _conv2d) that handle the actual layer creation. + + Examples + -------- + >>> # Assuming we have a TensorFlowLayerFactory implementation + >>> factory = TensorFlowLayerFactory(input_shape=(224, 224, 3)) + >>> factory.conv2d('Cr3,3,32') + >>> factory.pooling2d('Mp2,2,2,2') + >>> factory.flatten('Flt') + >>> factory.dense('Fs128') + >>> model = factory.build('my_model') + """ + + def __init__(self, input_shape: Tuple[int, ...] = None, data_format: str = 'channels_last'): + self.layers = [] + self.data_format = data_format + + # Make sure the input shape is valid + if input_shape is not None and not all(isinstance(dim, int) for dim in input_shape): + raise ValueError("Input shape must be a tuple of integers.") + + # Set the input shape if provided + self.shape = input_shape + self._input_shape = input_shape + +
+[docs] + @abstractmethod + def build(self, name: str): + """ + Abstract method to build the final model using the created layers. + + Parameters + ---------- + name : str + The name of the model. + + Returns + ------- + Any + The final built model. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1)) + >>> factory.conv2d('Cr3,3,32') + >>> factory.flatten('Flt') + >>> factory.dense('Fs10') + >>> model = factory.build('my_model') + """ + pass
+ + + # Layer creation methods +
+[docs] + def input(self, spec: str): + """ + Create an Input layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Input layer. + + Returns + ------- + Any + The created Input layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory() + >>> factory.input('1,28,28,1') + """ + config = parse_input_spec(spec) + + # Adjust input shape based on the parsed dimensions + if config.channels is not None and config.depth is not None: + # 4D input: shape = (depth, height, width, channels) + input_shape = (config.depth, config.height, + config.width, config.channels) + elif config.channels is not None: + # 3D input: shape = (height, width, channels) + input_shape = (config.height, config.width, config.channels) + elif config.height is not None: + # 2D input: shape = (height, width) + input_shape = (config.height, config.width) + else: + # 1D input: shape = (width,) + input_shape = (config.width,) + + # Adjust for data format + if self.data_format == 'channels_first': + if len(input_shape) == 3: + input_shape = (input_shape[2], input_shape[0], input_shape[1]) + elif len(input_shape) == 4: + input_shape = (input_shape[3], input_shape[0], + input_shape[1], input_shape[2]) + + self.shape = input_shape + self._input_shape = input_shape + + input_layer = self._input(config, input_shape) + if input_layer is not None: + # Some backends may not return the layer + self._add_layer(input_layer) + + return input_layer
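The dimension-selection logic in `input()` reduces to a small decision table over which parsed fields are present. A standalone sketch of just that branching (not the factory itself, and ignoring the `channels_first` permutation):

```python
# Sketch of the shape selection in input() above: the dims that are present
# decide whether the input is treated as 4D, 3D, 2D, or 1D.
def shape_from_dims(depth, height, width, channels):
    if channels is not None and depth is not None:
        return (depth, height, width, channels)  # 4D input
    if channels is not None:
        return (height, width, channels)         # 3D input
    if height is not None:
        return (height, width)                   # 2D input
    return (width,)                              # 1D input

print(shape_from_dims(None, 28, 28, 1))       # (28, 28, 1)
print(shape_from_dims(None, None, 256, None)) # (256,)
```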
+ + +
+[docs] + def conv2d(self, spec: str): + """ + Create a 2D Convolutional layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Conv2D layer. + + Returns + ------- + Any + The created Conv2D layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1)) + >>> factory.conv2d('Cr3,3,32') + """ + config = parse_conv2d_spec(spec) + self._validate_input_shape() + + conv_layer = self._conv2d(config) + self._add_layer(conv_layer) + + # Add activation if needed + if config.activation: + activation_layer = self._activation(config.activation) + self._add_layer(activation_layer) + + # Update shape + new_shape = self._compute_conv_output_shape(self.shape, config) + self._update_shape(new_shape) + + return conv_layer
+ + +
+[docs] + def pooling2d(self, spec: str): + """ + Create a 2D Pooling layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Pooling2D layer. + + Returns + ------- + Any + The created Pooling2D layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 32)) + >>> factory.pooling2d('Mp2,2,2,2') + """ + config = parse_pooling2d_spec(spec) + self._validate_input_shape() + + pool_layer = self._pooling2d(config) + self._add_layer(pool_layer) + + # Update shape + new_shape = self._compute_pool_output_shape(self.shape, config) + self._update_shape(new_shape) + + return pool_layer
+ + +
+[docs] + def dense(self, spec: str): + """ + Create a Dense (Fully Connected) layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Dense layer. + + Returns + ------- + Any + The created Dense layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(7*7*32,)) + >>> factory.dense('Fs128') + """ + config = parse_dense_spec(spec) + self._validate_input_shape() + + dense_layer = self._dense(config) + self._add_layer(dense_layer) + + # Add activation if needed + if config.activation: + activation_layer = self._activation(config.activation) + self._add_layer(activation_layer) + + # Update shape + self._update_shape((config.units,)) + + return dense_layer
+ + +
+[docs] + def rnn(self, spec: str): + """ + Create an RNN layer (LSTM or GRU), either unidirectional or bidirectional, based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the RNN layer. + + Returns + ------- + Any + The created RNN layer (either unidirectional or bidirectional). + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28)) + >>> factory.rnn('Ls128') # Unidirectional LSTM + >>> factory.rnn('Bl128') # Bidirectional LSTM + """ + config = parse_rnn_spec(spec) + self._validate_input_shape() + + rnn_layer = self._rnn(config) + self._add_layer(rnn_layer) + + # Update shape + if config.return_sequences: + if config.bidirectional: + self._update_shape((self.shape[0], config.units * 2)) + else: + self._update_shape((self.shape[0], config.units)) + else: + if config.bidirectional: + self._update_shape((config.units * 2,)) + else: + self._update_shape((config.units,)) + + return rnn_layer
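The four-way shape update after an RNN layer reduces to: the feature width doubles when the layer is bidirectional, and the timestep axis survives only when sequences are returned. A standalone sketch of that bookkeeping:

```python
# Sketch of the shape update performed by rnn() above.
def rnn_output_shape(input_shape, units, return_sequences, bidirectional):
    width = units * 2 if bidirectional else units
    return (input_shape[0], width) if return_sequences else (width,)

print(rnn_output_shape((28, 28), 128, True, False))  # (28, 128)
print(rnn_output_shape((28, 28), 128, True, True))   # (28, 256)
print(rnn_output_shape((28, 28), 128, False, True))  # (256,)
```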
+ + +
+[docs] + def batchnorm(self, spec: str): + """ + Create a BatchNormalization layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the BatchNormalization layer. + + Returns + ------- + Any + The created BatchNormalization layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 32)) + >>> factory.batchnorm('Bn') + """ + if spec != 'Bn': + raise ValueError( + f"BatchNormalization layer spec '{spec}' is incorrect. Expected 'Bn'.") + + self._validate_input_shape() + + batchnorm_layer = self._batchnorm() + self._add_layer(batchnorm_layer) + + # Shape remains the same + return batchnorm_layer
+ + +
+[docs] + def dropout(self, spec: str): + """ + Create a Dropout layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Dropout layer. + + Returns + ------- + Any + The created Dropout layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(128,)) + >>> factory.dropout('D50') + """ + config = parse_dropout_spec(spec) + layer = self._dropout(config) + self.layers.append(layer) + # Shape remains the same + return layer
+ + +
+[docs] + def activation(self, spec: str): + """ + Create an Activation layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Activation layer. + + Returns + ------- + Any + The created Activation layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(128,)) + >>> factory.activation('Ar') + """ + activation_function = parse_activation_spec(spec) + layer = self._activation(activation_function) + self.layers.append(layer) + # Shape remains the same + return layer
+ + +
+[docs] + def reshape(self, spec: str): + """ + Create a Reshape layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + VGSL specification string for the Reshape layer. Can be: + - 'Rc(2|3)': Collapse spatial dimensions (height, width, and channels). + - 'R<x>,<y>,<z>': Reshape to the specified target shape. + + Returns + ------- + Any + The created Reshape layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1)) + >>> factory.reshape('Rc3') + """ + if self.shape is None: + raise ValueError("Input shape must be set before adding layers.") + + # Handle 'Rc' (collapse spatial dimensions) specification + if spec.startswith('Rc'): + if spec == 'Rc2': + # Flatten to (batch_size, -1) + layer = self._flatten() + self.layers.append(layer) + self.shape = (int(self._compute_flatten_shape(self.shape)),) + return layer + + elif spec == 'Rc3': + # Reshape to (batch_size, seq_length, features) + if len(self.shape) != 3: + raise ValueError( + f"Expected a 3D input shape for 'Rc3', got {self.shape}") + + if self.data_format == 'channels_first': + C, H, W = self.shape + else: # channels_last + H, W, C = self.shape + + # Handle variable height + if H is None: + seq_length = None + else: + seq_length = H * W if W is not None else None + + features = C + config = ReshapeConfig(target_shape=(seq_length, features)) + layer = self._reshape(config) + self.layers.append(layer) + self.shape = (seq_length, features) + return layer + + else: + raise ValueError(f"Unsupported Rc variant: {spec}") + + # Handle regular reshape (e.g., 'R64,64,3') + config = parse_reshape_spec(spec) + layer = self._reshape(config) + self.layers.append(layer) + self.shape = config.target_shape + return layer
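For `'Rc3'` the collapse is (H, W, C) -> (H*W, C), with a `None` height propagated as a variable sequence length. A standalone sketch of just that arithmetic, assuming the `channels_last` layout:

```python
# Sketch of the 'Rc3' collapse in reshape() above (channels_last layout).
def collapse_rc3(shape):
    h, w, c = shape
    seq_length = None if h is None or w is None else h * w
    return (seq_length, c)

print(collapse_rc3((28, 28, 1)))    # (784, 1)
print(collapse_rc3((None, 28, 1)))  # (None, 1)
```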
+ + +
+[docs] + def flatten(self, spec: str): + """ + Create a Flatten layer based on the VGSL specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for the Flatten layer. + + Returns + ------- + Any + The created Flatten layer. + + Examples + -------- + >>> # Using a hypothetical concrete implementation + >>> factory = SomeConcreteLayerFactory(input_shape=(7, 7, 64)) + >>> factory.flatten('Flt') + """ + if spec != "Flt": + raise ValueError( + f"Flatten layer spec '{spec}' is incorrect. Expected 'Flt'.") + + layer = self._flatten() + self.layers.append(layer) + # Update shape + self.shape = (self._compute_flatten_shape(self.shape),) + return layer
+ + + # Abstract methods + @abstractmethod + def _input(self, config: InputConfig, input_shape: Tuple[int, ...]): + """ + Abstract method to create an Input layer. + + Parameters + ---------- + config : InputConfig + The configuration object returned by parse_input_spec. + input_shape : tuple of int + The input shape. + + Returns + ------- + Any + The created Input layer. + """ + pass + + @abstractmethod + def _conv2d(self, config: Conv2DConfig): + """ + Abstract method to create a Conv2D layer. + + Parameters + ---------- + config : Conv2DConfig + The configuration object returned by parse_conv2d_spec. + + Returns + ------- + Any + The created Conv2D layer. + """ + pass + + @abstractmethod + def _pooling2d(self, config: Pooling2DConfig): + """ + Abstract method to create a Pooling2D layer. + + Parameters + ---------- + config : Pooling2DConfig + The configuration object returned by parse_pooling2d_spec. + + Returns + ------- + Any + The created Pooling2D layer. + """ + pass + + @abstractmethod + def _dense(self, config: DenseConfig): + """ + Abstract method to create a Dense (Fully Connected) layer. + + Parameters + ---------- + config : DenseConfig + The configuration object returned by parse_dense_spec. + + Returns + ------- + Any + The created Dense layer. + """ + pass + + @abstractmethod + def _rnn(self, config: RNNConfig): + """ + Abstract method to create an RNN layer (LSTM or GRU). + + Parameters + ---------- + config : RNNConfig + The configuration object returned by parse_rnn_spec. + + Returns + ------- + Any + The created RNN layer. + """ + pass + + @abstractmethod + def _batchnorm(self): + """ + Abstract method to create a BatchNormalization layer. + + Returns + ------- + Any + The created BatchNormalization layer. + """ + pass + + @abstractmethod + def _dropout(self, config: DropoutConfig): + """ + Abstract method to create a Dropout layer. + + Parameters + ---------- + config : DropoutConfig + The configuration object returned by parse_dropout_spec. + + Returns + ------- + Any + The created Dropout layer. + """ + pass + + @abstractmethod + def _activation(self, activation_function: str): + """ + Abstract method to create an Activation layer. + + Parameters + ---------- + activation_function : str + Name of the activation function. + + Returns + ------- + Any + The created Activation layer. + """ + pass + + @abstractmethod + def _reshape(self, config: ReshapeConfig): + """ + Abstract method to create a Reshape layer. + + Parameters + ---------- + config : ReshapeConfig + The configuration object returned by parse_reshape_spec. + + Returns + ------- + Any + The created Reshape layer. + """ + pass + + @abstractmethod + def _flatten(self): + """ + Abstract method to create a Flatten layer. + + Returns + ------- + Any + The created Flatten layer. + """ + pass + + # Helper methods + def _compute_conv_output_shape(self, + input_shape: Tuple[int, ...], + config: Conv2DConfig) -> Tuple[int, ...]: + """ + Computes the output shape of a convolutional layer. + + Parameters + ---------- + input_shape : tuple + The input shape. + config : Conv2DConfig + The configuration object returned by parse_conv2d_spec. + + Returns + ------- + tuple + The output shape after the convolution. 
+ """ + if self.data_format == 'channels_last': + H_in, W_in, C_in = input_shape + else: + C_in, H_in, W_in = input_shape + + # Compute output dimensions based on padding and strides + # Adjust calculations based on the backend's handling of padding + + # Example computation for 'same' padding + H_out = math.ceil(H_in / config.strides[0]) \ + if H_in is not None else None + W_out = math.ceil(W_in / config.strides[1]) \ + if W_in is not None else None + C_out = config.filters + + if self.data_format == 'channels_last': + return (H_out, W_out, C_out) + else: + return (C_out, H_out, W_out) + + def _compute_pool_output_shape(self, + input_shape: Tuple[int, ...], + config: Pooling2DConfig) -> Tuple[int, ...]: + """ + Computes the output shape of a pooling layer. + + Parameters + ---------- + input_shape : tuple + The input shape. + config : Pooling2DConfig + The configuration object returned by parse_pooling2d_spec. + + Returns + ------- + tuple + The output shape after pooling. + """ + if self.data_format == 'channels_last': + H_in, W_in, C_in = input_shape + else: + C_in, H_in, W_in = input_shape + + # Compute output dimensions based on pooling size and strides + H_out = (H_in + config.strides[0] - 1) // config.strides[0] \ + if H_in is not None else None + W_out = (W_in + config.strides[1] - 1) // config.strides[1] if \ + W_in is not None else None + + if self.data_format == 'channels_last': + return (H_out, W_out, C_in) + else: + return (C_in, H_out, W_out) + + def _compute_flatten_shape(self, shape: Tuple[int, ...]) -> int: + """ + Computes the shape after flattening the input. + + Parameters + ---------- + shape : tuple + The input shape. + + Returns + ------- + int + The product of all dimensions. + """ + from functools import reduce + from operator import mul + return reduce(mul, shape) + + def _validate_input_shape(self): + """ + Validates that the input shape has been set before adding layers. + + Raises + ------ + ValueError + If the input shape has not been set. + """ + if self.shape is None: + raise ValueError("Input shape must be set before adding layers.") + + def _add_layer(self, layer: Any): + """ + Adds a layer to the list of layers. + + Parameters + ---------- + layer : Any + The layer to be added. + """ + self.layers.append(layer) + + def _update_shape(self, new_shape: Tuple[int, ...]): + """ + Updates the current shape of the output tensor. + + Parameters + ---------- + new_shape : tuple of int + The new shape to set. + """ + self.shape = new_shape
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/core/parser.html b/_modules/vgslify/core/parser.html new file mode 100644 index 0000000..17635cf --- /dev/null +++ b/_modules/vgslify/core/parser.html @@ -0,0 +1,606 @@ + + + + + + + + vgslify.core.parser — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.core.parser

+# Imports
+
+# > Standard Libraries
+import re
+
+# > Internal dependencies
+from vgslify.core.config import (Conv2DConfig, Pooling2DConfig, DenseConfig,
+                                 RNNConfig, DropoutConfig, ReshapeConfig, InputConfig)
+from vgslify.core.utils import get_activation_function
+
+
+
+[docs] +def parse_spec(model_spec: str) -> list: + """ + Parse the full model spec string into a list of individual layer specs. + + Parameters + ---------- + model_spec : str + The VGSL specification string defining the model architecture. + + Returns + ------- + list + A list of layer specification strings. + """ + return model_spec.split()
+ + + +
+[docs] +def parse_conv2d_spec(spec: str) -> Conv2DConfig: + """ + Parses a VGSL specification string for a Conv2D layer and returns the parsed configuration. + + Parameters + ---------- + spec : str + VGSL specification for the convolutional layer. Expected format: + `C(s|t|r|l|m)<x>,<y>,[<s_x>,<s_y>,]<d>` + - (s|t|r|l|m): Activation type. + - <x>,<y>: Kernel size. + - <s_x>,<s_y>: Optional strides (defaults to (1, 1) if not provided). + - <d>: Number of filters (depth). + + Returns + ------- + Conv2DConfig + Parsed configuration for the Conv2D layer. + + Raises + ------ + ValueError: + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> from vgslify.core.parser import parse_conv2d_spec + >>> config = parse_conv2d_spec("Cr3,3,64") + >>> print(config) + Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=64) + """ + + # Extract convolutional parameters + conv_filter_params = [int(match) for match in re.findall(r'\d+', spec)] + + # Check if the layer format is as expected + if len(conv_filter_params) < 3: + raise ValueError(f"Conv layer {spec} has too few parameters. " + "Expected format: C(s|t|r|l|m)<x>,<y>,<d> or " + "C(s|t|r|l|m)<x>,<y>,<s_x>,<s_y>,<d>") + if len(conv_filter_params) > 5: + raise ValueError(f"Conv layer {spec} has too many parameters. " + "Expected format: C(s|t|r|l|m)<x>,<y>,<d> or " + "C(s|t|r|l|m)<x>,<y>,<s_x>,<s_y>,<d>") + + # Extract activation function + try: + activation = get_activation_function(spec[1]) + except ValueError: + activation = None # Fall back to default activation + + # Check parameter length and assign kernel size, strides, and filters + if len(conv_filter_params) == 3: + x, y, d = conv_filter_params + strides = (1, 1) # Default stride + elif len(conv_filter_params) == 5: + x, y, s_x, s_y, d = conv_filter_params + strides = (s_x, s_y) + else: + raise ValueError(f"Invalid number of parameters in {spec}") + + kernel_size = (y, x) + + # Return the parsed configuration + return Conv2DConfig( + activation=activation, + kernel_size=kernel_size, + strides=strides, + filters=d + )
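Because strides are optional, the same parser accepts a three-number and a five-number form; both examples below follow directly from the grammar and docstring above:

```python
# The two Conv2D spec forms handled by parse_conv2d_spec above.
from vgslify.core.parser import parse_conv2d_spec

print(parse_conv2d_spec("Cr3,3,64"))
# Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=64)
print(parse_conv2d_spec("Cr3,3,2,2,64"))
# Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(2, 2), filters=64)
```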
+ + + +
+[docs] +def parse_pooling2d_spec(spec: str) -> Pooling2DConfig: + """ + Parses a VGSL specification string for a Pooling2D layer and returns the parsed configuration. + + Parameters + ---------- + spec : str + VGSL specification for the pooling layer. Expected format: + `Mp<x>,<y>[,<s_x>,<s_y>]` or `Ap<x>,<y>[,<s_x>,<s_y>]` + - <x>,<y>: Pool size. + - <s_x>,<s_y>: Strides. If not specified, defaults to pool size. + + Returns + ------- + Pooling2DConfig + Parsed configuration for the Pooling2D layer. + + Raises + ------ + ValueError: + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> config = parse_pooling2d_spec("Mp2,2") + >>> print(config) + Pooling2DConfig(pool_size=(2, 2), strides=(2, 2)) + >>> config = parse_pooling2d_spec("Mp2,2,1,1") + >>> print(config) + Pooling2DConfig(pool_size=(2, 2), strides=(1, 1)) + """ + + # Extract pooling type + pool_type = spec[:2] + if pool_type not in ['Mp', 'Ap']: + raise ValueError(f"Invalid pooling type '{pool_type}' in {spec}. " + "Expected 'Mp' for MaxPooling or 'Ap' for AveragePooling.") + + pool_type = 'max' if pool_type == 'Mp' else 'avg' + + # Extract pooling and stride parameters + pool_stride_params = [int(match) for match in re.findall(r'-?\d+', spec)] + + # Check if the parameters are as expected + if len(pool_stride_params) not in [2, 4]: + raise ValueError(f"Pooling layer {spec} does not have the expected number of parameters. " + "Expected format: <p><x>,<y>[,<stride_x>,<stride_y>]") + + pool_x, pool_y = pool_stride_params[:2] + + # If strides are not specified, set them equal to the pool size + if len(pool_stride_params) == 2: + stride_x, stride_y = pool_x, pool_y + else: + stride_x, stride_y = pool_stride_params[2:] + + # Check if pool and stride values are valid + if pool_x <= 0 or pool_y <= 0 or stride_x <= 0 or stride_y <= 0: + raise ValueError(f"Invalid values for pooling or stride in {spec}. " + "All values should be positive integers.") + + return Pooling2DConfig(pool_type=pool_type, + pool_size=(pool_x, pool_y), + strides=(stride_x, stride_y))
+ + + +
+[docs] +def parse_dense_spec(spec: str) -> DenseConfig: + """ + Parses a VGSL specification string for a Dense layer and returns the parsed configuration. + + Parameters + ---------- + spec : str + VGSL specification for the dense layer. Expected format: `F(s|t|r|l|m)<d>` + - `(s|t|r|l|m)`: Non-linearity type. One of sigmoid, tanh, relu, + linear, or softmax. + - `<d>`: Number of outputs (units). + + Returns + ------- + DenseConfig + Parsed configuration for the Dense layer. + + Raises + ------ + ValueError + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> config = parse_dense_spec("Fr64") + >>> print(config) + DenseConfig(activation='relu', units=64) + """ + + # Ensure the layer string format is as expected + if not re.match(r'^F[a-z]-?\d+$', spec): + raise ValueError( + f"Dense layer {spec} is of unexpected format. Expected format: F(s|t|r|l|m)<d>." + ) + + # Extract the activation function + try: + activation = get_activation_function(spec[1]) + except ValueError as e: + raise ValueError( + f"Invalid activation function '{spec[1]}' for Dense layer {spec}. " + "Expected one of 's', 't', 'r', 'l', or 'm'.") from e + + # Extract the number of neurons (units) + units = int(spec[2:]) + if units <= 0: + raise ValueError( + f"Invalid number of neurons {units} for Dense layer {spec}.") + + # Return the parsed configuration + return DenseConfig( + activation=activation, + units=units + )
+ + + +
+[docs]
+def parse_rnn_spec(spec: str) -> RNNConfig:
+    """
+    Parses a VGSL specification string for an RNN layer (LSTM, GRU, Bidirectional)
+    and returns the parsed configuration.
+
+    Parameters
+    ----------
+    spec : str
+        VGSL specification for the RNN layer. Expected format:
+        For LSTM/GRU: `(L|G)(f|r)[s]<n>[,D<rate>,Rd<rate>]`
+        For Bidirectional: `B(g|l)<n>[,D<rate>,Rd<rate>]`
+
+    Returns
+    -------
+    RNNConfig
+        Parsed configuration for the RNN layer.
+
+    Raises
+    ------
+    ValueError
+        If the provided VGSL spec string does not match the expected format.
+
+    Examples
+    --------
+    >>> config = parse_rnn_spec("Lf64,D50,Rd25")
+    >>> print(config)
+    RNNConfig(units=64, return_sequences=False, go_backwards=False, dropout=0.5,
+              recurrent_dropout=0.25, rnn_type='f', bidirectional=False)
+    """
+
+    match = re.match(
+        r'([LGB])([frgl])(s?)(-?\d+),?(D-?\d+)?,?(Rd-?\d+)?$', spec)
+    if not match:
+        raise ValueError(
+            f"RNN layer {spec} is of unexpected format. Expected format: "
+            "L(f|r)[s]<n>[,D<rate>,Rd<rate>], G(f|r)[s]<n>[,D<rate>,Rd<rate>], "
+            "or B(g|l)<n>[,D<rate>,Rd<rate>]."
+        )
+
+    layer_type, rnn_type, summarize, units, dropout, recurrent_dropout = match.groups()
+
+    units = int(units)
+    dropout = 0 if dropout is None else int(dropout.replace('D', "")) / 100
+    recurrent_dropout = 0 if recurrent_dropout is None else int(
+        recurrent_dropout.replace("Rd", "")) / 100
+
+    # Validation
+    if units <= 0:
+        raise ValueError(
+            f"Invalid number of units {units} for RNN layer {spec}.")
+    if dropout < 0 or dropout > 1:
+        raise ValueError("Dropout rate must be between 0 and 1.")
+    if recurrent_dropout < 0 or recurrent_dropout > 1:
+        raise ValueError("Recurrent dropout rate must be between 0 and 1.")
+
+    # Return RNNConfig with parsed parameters
+    return RNNConfig(
+        units=units,
+        return_sequences=bool(summarize) if layer_type == 'L' else True,
+        go_backwards=rnn_type == 'r',
+        dropout=dropout,
+        recurrent_dropout=recurrent_dropout,
+        rnn_type=rnn_type,
+        bidirectional=layer_type == 'B'
+    )
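A few spec variants the regex above accepts; note that the second character doubles as direction for `L`/`G` (`f` forward, `r` reverse) and as cell type for `B` (`l` LSTM, `g` GRU):

```python
# Sketch of the three RNN spec families parsed above.
from vgslify.core.parser import parse_rnn_spec

fwd_lstm = parse_rnn_spec("Lf64")     # forward LSTM, 64 units
rev_gru = parse_rnn_spec("Gr64,D20")  # reverse GRU with 20% dropout
bi_lstm = parse_rnn_spec("Bl128")     # bidirectional LSTM

print(rev_gru.go_backwards, rev_gru.dropout)            # True 0.2
print(bi_lstm.bidirectional, bi_lstm.return_sequences)  # True True
```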
+ + + +
+[docs] +def parse_dropout_spec(spec: str) -> DropoutConfig: + """ + Parses a VGSL specification string for a Dropout layer and returns the parsed configuration. + + Parameters + ---------- + spec : str + VGSL specification for the Dropout layer. Expected format: + `D<rate>` where <rate> is the dropout percentage (0-100). + + Returns + ------- + DropoutConfig + Parsed configuration for the Dropout layer. + + Raises + ------ + ValueError + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> config = parse_dropout_spec("D50") + >>> print(config) + DropoutConfig(rate=0.5) + """ + + match = re.match(r'D(-?\d+)$', spec) + if not match: + raise ValueError( + f"Dropout layer {spec} is of unexpected format. Expected format: D<rate>." + ) + + dropout_rate = int(match.group(1)) + + if dropout_rate < 0 or dropout_rate > 100: + raise ValueError("Dropout rate must be in the range [0, 100].") + + return DropoutConfig(rate=dropout_rate / 100)
+ + + +
+[docs] +def parse_activation_spec(spec: str) -> str: + """ + Parses a VGSL specification string for an Activation layer and returns the activation function. + + Parameters + ---------- + spec : str + VGSL specification for the Activation layer. Expected format: `A(s|t|r|l|m)` + - `s`: softmax + - `t`: tanh + - `r`: relu + - `l`: linear + - `m`: sigmoid + + Returns + ------- + str + The activation function name. + + Raises + ------ + ValueError + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> activation = parse_activation_spec("Ar") + >>> print(activation) + 'relu' + """ + + match = re.match(r'A([strlm])$', spec) + if not match: + raise ValueError( + f"Activation layer spec '{spec}' is incorrect. Expected format: A(s|t|r|l|m).") + + return get_activation_function(match.group(1))
+ + + +
+[docs] +def parse_reshape_spec(spec: str) -> ReshapeConfig: + """ + Parses a VGSL specification string for a Reshape layer and returns the target shape. + + Parameters + ---------- + spec : str + VGSL specification for the Reshape layer. Expected format: `R<x>,<y>,<z>` + + Returns + ------- + ReshapeConfig + Parsed configuration for the Reshape layer. + + Raises + ------ + ValueError + If the provided VGSL spec string does not match the expected format. + + Examples + -------- + >>> config = parse_reshape_spec("R64,64,3") + >>> print(config) + ReshapeConfig(target_shape=(64, 64, 3)) + """ + + match = re.match(r'R(-?\d+),(-?\d+)(?:,(-?\d+))?$', spec) + if not match: + raise ValueError( + f"Reshape layer spec '{spec}' is incorrect. Expected format: R<x>,<y>[,<z>].") + + target_shape = tuple(int(x) for x in match.groups() if x) + return ReshapeConfig(target_shape=target_shape)
+ + + +
+[docs]
+def parse_input_spec(spec: str) -> InputConfig:
+    """
+    Parses a VGSL specification string for an Input layer and returns the parsed configuration.
+
+    Parameters
+    ----------
+    spec : str
+        VGSL specification for the Input layer. Supported format:
+        `<batch_size>,<depth>,<height>,<width>,<channels>` for 4D inputs,
+        `<batch_size>,<height>,<width>,<channels>` for 3D inputs,
+        `<batch_size>,<height>,<width>` for 2D inputs,
+        `<batch_size>,<width>` for 1D inputs.
+
+    Returns
+    -------
+    InputConfig
+        Parsed configuration for the Input layer.
+
+    Raises
+    ------
+    ValueError
+        If the provided VGSL spec string does not match the expected format.
+
+    Examples
+    --------
+    >>> config = parse_input_spec("None,224,224,3")
+    >>> print(config)
+    InputConfig(batch_size=None, depth=None, height=224, width=224, channels=3)
+    """
+    try:
+        dims = spec.split(",")
+        if len(dims) == 5:
+            batch, depth, height, width, channels = dims
+        elif len(dims) == 4:
+            batch, height, width, channels = dims
+            depth = None
+        elif len(dims) == 3:
+            batch, height, width = dims
+            depth, channels = None, None
+        elif len(dims) == 2:
+            batch, width = dims
+            height, depth, channels = None, None, None
+        else:
+            raise ValueError(f"Invalid input spec: {spec}")
+
+        return InputConfig(
+            batch_size=None if batch == "None" else int(batch),
+            width=None if width == "None" else int(width),
+            depth=None if depth == "None" else int(depth) if depth else None,
+            height=None if height == "None" else int(
+                height) if height else None,
+            channels=None if channels == "None" else int(
+                channels) if channels else None
+        )
+    except ValueError as e:
+        raise ValueError(
+            f"Invalid input string format '{spec}'. Expected valid VGSL format.") from e
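The parser keys purely off the number of comma-separated fields, and the literal string `"None"` maps to a `None` dimension:

```python
# Input specs of different ranks, as handled by parse_input_spec above.
from vgslify.core.parser import parse_input_spec

print(parse_input_spec("None,224,224,3").height)  # 224 (3D input + batch)
print(parse_input_spec("None,32,64,64,3").depth)  # 32  (4D input + batch)
print(parse_input_spec("None,None,128").width)    # 128 (2D input, variable height)
```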
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/core/utils.html b/_modules/vgslify/core/utils.html new file mode 100644 index 0000000..c2d96e6 --- /dev/null +++ b/_modules/vgslify/core/utils.html @@ -0,0 +1,158 @@ + + + + + + + + vgslify.core.utils — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.core.utils

+
+[docs] +def get_activation_function(activation_char: str) -> str: + """ + Maps a VGSL activation character to the corresponding Keras activation function. + + Parameters + ---------- + activation_char : str + The character representing the activation function in the VGSL spec. + + Returns + ------- + str + The name of the Keras activation function. + + Raises + ------ + ValueError + If the provided activation character is not recognized. + + Examples + -------- + >>> activation = get_activation_function('r') + >>> print(activation) + 'relu' + """ + activation_map = { + 's': 'softmax', + 't': 'tanh', + 'r': 'relu', + 'l': 'linear', + 'm': 'sigmoid' + } + + if activation_char not in activation_map: + raise ValueError(f"Invalid activation character '{activation_char}'.") + + return activation_map[activation_char]
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/generator.html b/_modules/vgslify/generator.html new file mode 100644 index 0000000..547942d --- /dev/null +++ b/_modules/vgslify/generator.html @@ -0,0 +1,380 @@ + + + + + + + + vgslify.generator — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.generator

+# Imports
+
+# > Standard Libraries
+from typing import Any, Dict, List
+
+# > Internal Libraries
+from vgslify.core.parser import parse_spec
+
+
+
+[docs]
+class VGSLModelGenerator:
+    """
+    VGSLModelGenerator constructs a neural network model based on a VGSL (Variable-size Graph
+    Specification Language) specification string. This class supports dynamic model generation
+    for different backends, with current support for TensorFlow and PyTorch.
+
+    The generator takes a VGSL specification string that defines the architecture of the neural
+    network, including the input layer, convolutional layers, pooling layers, RNN layers, dense
+    layers, and more. The class parses this string, constructs the layers in sequence, and builds
+    the final model.
+    """
+
+    def __init__(self, backend: str = "auto") -> None:
+        """
+        Initialize the VGSLModelGenerator with the backend.
+
+        Parameters
+        ----------
+        backend : str, optional
+            The backend to use for building the model. Can be "tensorflow", "torch", or "auto".
+            Default is "auto", which will attempt to automatically detect the available backend.
+        """
+        self.backend = self._detect_backend(backend)
+        self.layer_factory_class, self.layer_constructors = self._initialize_backend_and_factory(
+            self.backend)
+        self.layer_factory = self.layer_factory_class()
+
+[docs]
+    def generate_model(self, model_spec: str, model_name: str = "VGSL_Model") -> Any:
+        """
+        Build the model based on the VGSL spec string.
+
+        This method parses the VGSL specification string, creates each layer
+        using the layer factory, and constructs the model sequentially.
+
+        Parameters
+        ----------
+        model_spec : str
+            The VGSL specification string defining the model architecture.
+        model_name : str, optional
+            The name of the model, by default "VGSL_Model".
+
+        Returns
+        -------
+        Any
+            The built model using the specified backend.
+        """
+        return self._process_layers(model_spec, return_history=False, model_name=model_name)
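An end-to-end usage sketch; the spec string below is assembled from the layer grammar documented earlier in this section, and either TensorFlow or PyTorch must be installed for backend detection to succeed:

```python
# Hypothetical end-to-end use of the generator:
# input -> conv -> max-pool -> flatten -> softmax dense.
from vgslify.generator import VGSLModelGenerator

generator = VGSLModelGenerator(backend="auto")
model = generator.generate_model(
    "None,28,28,1 Cr3,3,32 Mp2,2 Flt Fs10",
    model_name="demo_model",
)
```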
+ + +
+[docs] + def generate_history(self, model_spec: str) -> List[Any]: + """ + Generate the history of layer specifications without building the full model. + + This method parses the VGSL specification string, constructs each layer using + the layer factory, and stores them in a list, but does not chain them or connect + input/output layers. + + Parameters + ---------- + model_spec : str + The VGSL specification string defining the model architecture. + + Returns + ------- + list + A list of layers constructed from the specification string. + """ + return self._process_layers(model_spec, return_history=True)
+ + + def _process_layers(self, model_spec: str, return_history: bool = False, model_name: str = "VGSL_Model") -> Any: + """ + Process the VGSL specification string to build the model or generate a history of layers. + + Parameters + ---------- + model_spec : str + The VGSL specification string defining the model architecture. + return_history : bool, optional + If True, returns a list of constructed layers (history) instead of the final model. + model_name : str, optional + The name of the model, by default "VGSL_Model" + + Returns + ------- + Any + The built model using the specified backend if `return_history` is False. + Otherwise, a list of constructed layers. + """ + # Create a new instance of the layer factory for this model + self.layer_factory = self.layer_factory_class() + + # Parse the specification string + specs = parse_spec(model_spec) + + # Initialize the first layer (input layer) + input_layer = self.layer_factory.input(specs[0]) + + # Initialize history if required + history = [input_layer] if return_history else None + + # Process each layer specification + for spec in specs[1:]: + layer = self._construct_layer(spec, self.layer_factory) + if return_history: + history.append(layer) + + if return_history: + return history + + # Build and return the final model + return self.layer_factory.build(name=model_name) + +
+[docs] + def construct_layer(self, spec: str) -> Any: + """ + Constructs a single layer using the layer factory based on the spec string. + + Parameters + ---------- + spec : str + The VGSL specification string for a layer. + + Returns + ------- + Any + The constructed layer. + + Raises + ------ + ValueError + If the layer specification is unknown. + """ + # Create a new instance of the layer factory + layer_factory = self.layer_factory_class() + return self._construct_layer(spec, layer_factory)
+ + + ### Private Helper Methods ### + + def _detect_backend(self, backend: str) -> str: + """ + Detect the backend automatically by checking available libraries. + If both TensorFlow and PyTorch are available, TensorFlow is selected by default. + + Parameters + ---------- + backend : str + The backend to use for building the model. Can be "tensorflow", "torch", or "auto". + + Returns + ------- + str + The detected or provided backend ("tensorflow" or "torch"). + """ + if backend != "auto": + return backend + + try: + import tensorflow as tf + return "tensorflow" + except ImportError: + pass + + try: + import torch + return "torch" + except ImportError: + pass + + raise ImportError( + "Neither TensorFlow nor PyTorch is installed. Please install one of them.") + + def _initialize_backend_and_factory(self, backend: str) -> tuple: + """ + Initialize the backend and return the layer factory class and constructor map. + + Parameters + ---------- + backend : str + The backend to use for building the model. + + Returns + ------- + tuple + A tuple containing the layer factory class and layer constructors dictionary. + """ + try: + if backend == "tensorflow": + from vgslify.tensorflow.layers import TensorFlowLayerFactory as LayerFactory + elif backend == "torch": + from vgslify.torch.layers import TorchLayerFactory as LayerFactory + else: + raise ValueError( + f"Unsupported backend: {backend}. Choose 'tensorflow' or 'torch'.") + except ImportError: + raise ImportError( + f"Backend '{backend}' is not available. Please install the required library.") + + layer_constructors: Dict[str, Any] = { + 'C': LayerFactory.conv2d, + 'Mp': LayerFactory.pooling2d, + 'Ap': LayerFactory.pooling2d, + 'L': LayerFactory.rnn, + 'G': LayerFactory.rnn, + 'B': LayerFactory.rnn, + 'Flt': LayerFactory.flatten, + 'F': LayerFactory.dense, + 'D': LayerFactory.dropout, + 'Bn': LayerFactory.batchnorm, + 'A': LayerFactory.activation, + 'R': LayerFactory.reshape, + 'Rc': LayerFactory.reshape, + } + + return LayerFactory, layer_constructors + + def _construct_layer(self, spec: str, layer_factory) -> Any: + """ + Constructs a layer using the layer factory based on the specification string. + + Parameters + ---------- + spec : str + The VGSL specification string for a layer. + layer_factory : Any + The layer factory instance to use for constructing the layer. + + Returns + ------- + Any + The constructed layer. + + Raises + ------ + ValueError + If the layer specification is unknown. + """ + for prefix in sorted(self.layer_constructors.keys(), key=len, reverse=True): + if spec.startswith(prefix): + layer_constructor = getattr( + layer_factory, self.layer_constructors[prefix].__name__) + return layer_constructor(spec) + + raise ValueError(f"Unknown layer specification: {spec}")
+ +
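The longest-prefix-first ordering in `_construct_layer` is load-bearing: several prefixes share a first character, so a naive first-match scan would route `'Flt'` to the dense handler and `'Bn'` to the RNN handler. A standalone sketch of the dispatch:

```python
# Why _construct_layer sorts prefixes by length (descending) before matching:
# 'Flt' must beat 'F', 'Rc' must beat 'R', and 'Bn' must beat 'B'.
PREFIXES = ['C', 'Mp', 'Ap', 'L', 'G', 'B', 'Flt', 'F', 'D', 'Bn', 'A', 'R', 'Rc']

def match_prefix(spec: str) -> str:
    for prefix in sorted(PREFIXES, key=len, reverse=True):
        if spec.startswith(prefix):
            return prefix
    raise ValueError(f"Unknown layer specification: {spec}")

print(match_prefix("Flt"))  # Flt
print(match_prefix("Rc3"))  # Rc
print(match_prefix("Bn"))   # Bn
```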
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/parsers/base_parser.html b/_modules/vgslify/parsers/base_parser.html new file mode 100644 index 0000000..8ea2f02 --- /dev/null +++ b/_modules/vgslify/parsers/base_parser.html @@ -0,0 +1,499 @@ + + + + + + + + vgslify.parsers.base_parser — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.parsers.base_parser

+from abc import ABC, abstractmethod
+from typing import List, Union
+from vgslify.core.config import (
+    ActivationConfig,
+    Conv2DConfig,
+    Pooling2DConfig,
+    DenseConfig,
+    RNNConfig,
+    DropoutConfig,
+    ReshapeConfig,
+    InputConfig
+)
+
+
+[docs] +class BaseModelParser(ABC): + """ + Abstract base class for model parsers. + Provides common utility methods for parsing different frameworks and generating VGSL spec strings. + """ + +
+[docs] + def generate_vgsl(self, configs: List[Union[ + Conv2DConfig, + Pooling2DConfig, + DenseConfig, + RNNConfig, + DropoutConfig, + ReshapeConfig, + InputConfig, + ActivationConfig + ]]) -> str: + """ + Convert a list of layer configuration dataclasses into a VGSL specification string. + + Parameters + ---------- + configs : List[Union[Conv2DConfig, Pooling2DConfig, DenseConfig, RNNConfig, + DropoutConfig, ReshapeConfig, InputConfig, ActivationConfig]] + List of layer configurations. + + Returns + ------- + str + VGSL specification string. + """ + vgsl_parts = [] + i = len(configs) - 1 # Start from the end of the list to merge activations + + while i >= 0: + config = configs[i] + + if isinstance(config, ActivationConfig): + # Check if there is a preceding layer to merge with + if i > 0: + preceding_config = configs[i - 1] + if isinstance(preceding_config, (Conv2DConfig, DenseConfig, RNNConfig)) and \ + preceding_config.activation == 'linear': + # Merge the activation into the preceding layer + preceding_config.activation = config.activation + # Skip adding this ActivationConfig + i -= 1 + continue + # If cannot merge, add the activation spec + vgsl_parts.append(self._vgsl_activation(config)) + else: + # Handle non-activation layers and strings + if isinstance(config, InputConfig): + vgsl_parts.append(self._vgsl_input(config)) + elif isinstance(config, Conv2DConfig): + vgsl_parts.append(self._vgsl_conv2d(config)) + elif isinstance(config, Pooling2DConfig): + vgsl_parts.append(self._vgsl_pooling2d(config)) + elif isinstance(config, DenseConfig): + vgsl_parts.append(self._vgsl_dense(config)) + elif isinstance(config, RNNConfig): + vgsl_parts.append(self._vgsl_rnn(config)) + elif isinstance(config, DropoutConfig): + vgsl_parts.append(self._vgsl_dropout(config)) + elif isinstance(config, ReshapeConfig): + vgsl_parts.append(self._vgsl_reshape(config)) + elif isinstance(config, str): + vgsl_parts.append(config) + else: + raise ValueError(f"Unsupported configuration type: {type(config).__name__}") + i -= 1 # Move to the previous config + + # Reverse to restore the original order + return " ".join(vgsl_parts[::-1])
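The backward walk exists so that a standalone Activation layer can be folded into a preceding Conv2D/Dense/RNN layer whose activation is `'linear'`; the pair then renders as one spec token. A sketch of the merge case, constructing the two configs by hand:

```python
# Sketch of the merge handled by generate_vgsl above: Dense(linear) followed
# by Activation(softmax) collapses to the single token 'Fs10' rather than
# emitting 'Fl10 As'.
from vgslify.core.config import ActivationConfig, DenseConfig

dense = DenseConfig(activation="linear", units=10)
act = ActivationConfig(activation="softmax")

# generate_vgsl walks configs from the end; on seeing `act` it rewrites the
# preceding layer's activation and skips emitting a separate 'A' spec.
dense.activation = act.activation
print(dense)  # DenseConfig(activation='softmax', units=10)
```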
+ + +
+[docs] + @abstractmethod + def parse_model(self, model) -> str: + """Parse the model into a VGSL spec string.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_input(self, layer) -> InputConfig: + """Parse the input layer into an InputConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_conv2d(self, layer) -> Conv2DConfig: + """Parse the Conv2D layer into a Conv2DConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_dense(self, layer) -> DenseConfig: + """Parse the Dense layer into a DenseConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_rnn(self, layer) -> RNNConfig: + """Parse the RNN layer into an RNNConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_pooling(self, layer) -> Pooling2DConfig: + """Parse the Pooling layer into a Pooling2DConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_batchnorm(self, layer) -> str: + """Parse the BatchNorm layer into a VGSL spec string.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_dropout(self, layer) -> DropoutConfig: + """Parse the Dropout layer into a DropoutConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_flatten(self, layer) -> str: + """Parse the Flatten layer into a VGSL spec string.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_reshape(self, layer) -> ReshapeConfig: + """Parse the Reshape layer into a ReshapeConfig dataclass.""" + pass
+ + +
+[docs] + @abstractmethod + def parse_activation(self, layer) -> ActivationConfig: + """Parse the Activation layer into an ActivationConfig dataclass.""" + pass
+ + + # VGSL Generation Methods + def _vgsl_input(self, config: InputConfig) -> str: + """ + Generate VGSL string for input layer. + + Parameters + ---------- + config : InputConfig + Configuration for the input layer. + + Returns + ------- + str + VGSL string representation of the input layer. + """ + return ",".join(map(str, filter(lambda x: x != -1, [ + config.batch_size, + config.depth, + config.height, + config.width, + config.channels + ]))) + + def _vgsl_conv2d(self, config: Conv2DConfig) -> str: + """ + Generate VGSL string for Conv2D layer. + + Parameters + ---------- + config : Conv2DConfig + Configuration for the Conv2D layer. + + Returns + ------- + str + VGSL string representation of the Conv2D layer. + """ + act = self._get_activation_code(config.activation) + stride_spec = ",".join(map(str, config.strides)) if config.strides != (1, 1) else "" + stride_str = f",{stride_spec}" if stride_spec else "" + return f"C{act}{config.kernel_size[0]},{config.kernel_size[1]}{stride_str},{config.filters}" + + def _vgsl_pooling2d(self, config: Pooling2DConfig) -> str: + """ + Generate VGSL string for Pooling2D layer. + + Parameters + ---------- + config : Pooling2DConfig + Configuration for the Pooling2D layer. + + Returns + ------- + str + VGSL string representation of the Pooling2D layer. + """ + pool_type_code = 'Mp' if config.pool_type.lower() == 'max' else 'Ap' + pool_size_str = ",".join(map(str, config.pool_size)) + strides_str = ",".join(map(str, config.strides)) if config.strides != config.pool_size else "" + return f"{pool_type_code}{pool_size_str}{',' + strides_str if strides_str else ''}" + + def _vgsl_dense(self, config: DenseConfig) -> str: + """ + Generate VGSL string for Dense layer. + + Parameters + ---------- + config : DenseConfig + Configuration for the Dense layer. + + Returns + ------- + str + VGSL string representation of the Dense layer. + """ + act = self._get_activation_code(config.activation) + return f"F{act}{config.units}" + + def _vgsl_rnn(self, config: RNNConfig) -> str: + """ + Generate VGSL string for RNN layer. + + Parameters + ---------- + config : RNNConfig + Configuration for the RNN layer. + + Returns + ------- + str + VGSL string representation of the RNN layer. + + Raises + ------ + ValueError + If an unsupported RNN type is provided. + """ + if config.bidirectional: + layer_type = 'B' + rnn_type = 'l' if config.rnn_type.lower() == 'lstm' else 'g' + else: + if config.rnn_type.lower() == 'lstm': + layer_type = 'L' + elif config.rnn_type.lower() == 'gru': + layer_type = 'G' + else: + raise ValueError(f"Unsupported RNN type: {config.rnn_type}") + rnn_type = 'r' if config.go_backwards else 'f' + + return_sequences = 's' if config.return_sequences and not config.bidirectional else '' + + spec = f"{layer_type}{rnn_type}{return_sequences}{config.units}" + + if config.dropout > 0: + spec += f",D{int(config.dropout * 100)}" + if config.recurrent_dropout > 0: + spec += f",Rd{int(config.recurrent_dropout * 100)}" + + return spec + + def _vgsl_dropout(self, config: DropoutConfig) -> str: + """ + Generate VGSL string for Dropout layer. + + Parameters + ---------- + config : DropoutConfig + Configuration for the Dropout layer. + + Returns + ------- + str + VGSL string representation of the Dropout layer. + """ + return f"D{int(config.rate * 100)}" + + def _vgsl_reshape(self, config: ReshapeConfig) -> str: + """ + Generate VGSL string for Reshape layer. + + Parameters + ---------- + config : ReshapeConfig + Configuration for the Reshape layer. 
+ + Returns + ------- + str + VGSL string representation of the Reshape layer. + """ + if len(config.target_shape) == 2 and (None in config.target_shape or -1 in config.target_shape): + return "Rc3" + else: + reshape_dims = ",".join(map(lambda x: str(x) if x is not None else '-1', config.target_shape)) + return f"R{reshape_dims}" + + def _vgsl_activation(self, config: ActivationConfig) -> str: + """ + Generate VGSL string for Activation layer. + + Parameters + ---------- + config : ActivationConfig + Configuration for the Activation layer. + + Returns + ------- + str + VGSL string representation of the Activation layer. + """ + act = self._get_activation_code(config.activation) + return f"A{act}" + + def _get_activation_code(self, activation: str) -> str: + """ + Get the VGSL activation code for a given activation function. + + Parameters + ---------- + activation : str + Name of the activation function. + + Returns + ------- + str + VGSL activation code. + + Raises + ------ + ValueError + If an unsupported activation function is provided. + """ + ACTIVATION_MAP = { + 'softmax': 's', 'tanh': 't', 'relu': 'r', + 'linear': 'l', 'sigmoid': 'm', 'identity': 'l' + } + act_code = ACTIVATION_MAP.get(activation.lower(), None) + if act_code is None: + raise ValueError(f"Unsupported activation '{activation}'.") + return act_code
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/parsers/tf_parser.html b/_modules/vgslify/parsers/tf_parser.html new file mode 100644 index 0000000..5319a27 --- /dev/null +++ b/_modules/vgslify/parsers/tf_parser.html @@ -0,0 +1,512 @@ + + + + + + + + vgslify.parsers.tf_parser — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.parsers.tf_parser

+# Imports
+
+# > Standard Library
+from typing import Callable, Dict, Type, Union
+
+# > Third-Party Dependencies
+import tensorflow as tf
+
+# > Internal
+from vgslify.core.config import (
+    ActivationConfig,
+    Conv2DConfig,
+    Pooling2DConfig,
+    DenseConfig,
+    RNNConfig,
+    DropoutConfig,
+    ReshapeConfig,
+    InputConfig
+)
+from vgslify.parsers.base_parser import BaseModelParser
+
+
+[docs] +class TensorFlowModelParser(BaseModelParser): + """ + Parser for converting TensorFlow Keras models into VGSL (Variable-size Graph Specification Language) spec strings. + + This class extends the BaseModelParser to provide specific functionality for TensorFlow Keras models. + It uses configuration dataclasses to represent different layer types and converts them into + VGSL spec strings. + + Attributes + ---------- + layer_parsers : Dict[Type[tf.keras.layers.Layer], Callable] + A dictionary mapping TensorFlow Keras layer types to their corresponding parsing methods. + + Notes + ----- + This parser supports a wide range of TensorFlow Keras layers and can be extended to support + additional layer types by adding new parsing methods and updating the layer_parsers dictionary. + """ + + def __init__(self): + # Initialize the layer parsers mapping + self.layer_parsers: Dict[Type[tf.keras.layers.Layer], Callable] = { + tf.keras.layers.InputLayer: self.parse_input, + tf.keras.layers.Conv2D: self.parse_conv2d, + tf.keras.layers.Dense: self.parse_dense, + tf.keras.layers.LSTM: self.parse_rnn, + tf.keras.layers.GRU: self.parse_rnn, + tf.keras.layers.Bidirectional: self.parse_rnn, + tf.keras.layers.MaxPooling2D: self.parse_pooling, + tf.keras.layers.AveragePooling2D: self.parse_pooling, + tf.keras.layers.BatchNormalization: self.parse_batchnorm, + tf.keras.layers.Dropout: self.parse_dropout, + tf.keras.layers.Reshape: self.parse_reshape, + tf.keras.layers.Flatten: self.parse_flatten, + tf.keras.layers.Activation: self.parse_activation + } + +
+[docs] + def parse_model(self, model: tf.keras.models.Model) -> str: + """ + Parse a TensorFlow Keras model into a VGSL spec string. + + Parameters + ---------- + model : tf.keras.models.Model + Keras model to be converted. + + Returns + ------- + str + VGSL spec string. + + Raises + ------ + ValueError + If the model contains unsupported layers or if the input shape is invalid. + """ + configs = [] + + # Handle InputLayer + if not isinstance(model.layers[0], tf.keras.layers.InputLayer): + input_layer = tf.keras.layers.InputLayer( + input_shape=model.input_shape[1:], + batch_size=model.input_shape[0] + ) + input_config = self.parse_input(input_layer) + configs.append(input_config) + + # Iterate through all layers in the model + for idx, layer in enumerate(model.layers): + layer_type = type(layer) + parser_func = self.layer_parsers.get(layer_type, None) + + if parser_func: + # Parse the layer + config = parser_func(layer) + + # Append the config if not None + if config: + configs.append(config) + else: + raise ValueError( + f"Unsupported layer type {layer_type.__name__} at position {idx}." + ) + + # Generate VGSL spec string from configs + return self.generate_vgsl(configs)
+ + + + # Parser methods for different layer types + +
+[docs] + def parse_input(self, layer: tf.keras.layers.InputLayer) -> InputConfig: + """ + Parse an InputLayer into an InputConfig dataclass. + + Parameters + ---------- + layer : tf.keras.layers.InputLayer + The InputLayer to parse. + + Returns + ------- + InputConfig + The configuration for the input layer. + """ + input_shape = layer.output.shape + # Assuming input_shape: (batch_size, depth, height, width, channels) + if len(input_shape) == 5: + batch_size, depth, height, width, channels = input_shape + elif len(input_shape) == 4: + batch_size, height, width, channels = input_shape + depth = -1 + else: + raise ValueError(f"Unsupported input shape: {input_shape}") + + return InputConfig( + batch_size=batch_size, + depth=depth, + height=height, + width=width, + channels=channels + )
+ + +
+[docs] + def parse_conv2d(self, layer: tf.keras.layers.Conv2D) -> Conv2DConfig: + """ + Parse a Conv2D layer into a Conv2DConfig dataclass. + + Parameters + ---------- + layer : tf.keras.layers.Conv2D + The Conv2D layer to parse. + + Returns + ------- + Conv2DConfig + The configuration for the Conv2D layer. + """ + activation = self._extract_activation(layer) + return Conv2DConfig( + activation=activation, + kernel_size=layer.kernel_size, + strides=layer.strides, + filters=layer.filters + )
+ + +
+[docs] + def parse_dense(self, layer: tf.keras.layers.Dense) -> DenseConfig: + """ + Parse a Dense layer into a DenseConfig dataclass. + + Parameters + ---------- + layer : tf.keras.layers.Dense + The Dense layer to parse. + + Returns + ------- + DenseConfig + The configuration for the Dense layer. + """ + activation = self._extract_activation(layer) + return DenseConfig( + activation=activation, + units=layer.units + )
+ + +
+[docs] + def parse_rnn(self, layer: Union[tf.keras.layers.LSTM, + tf.keras.layers.GRU, + tf.keras.layers.Bidirectional]) -> RNNConfig: + """ + Parse an RNN layer (LSTM, GRU, or Bidirectional) into an RNNConfig dataclass. + + Parameters + ---------- + layer : Union[tf.keras.layers.LSTM, tf.keras.layers.GRU, tf.keras.layers.Bidirectional] + The RNN layer to parse. + + Returns + ------- + RNNConfig + The configuration for the RNN layer. + """ + if isinstance(layer, tf.keras.layers.Bidirectional): + wrapped_layer = layer.forward_layer + bidirectional = True + else: + wrapped_layer = layer + bidirectional = False + + if isinstance(wrapped_layer, tf.keras.layers.LSTM): + rnn_type = 'lstm' + elif isinstance(wrapped_layer, tf.keras.layers.GRU): + rnn_type = 'gru' + else: + raise ValueError(f"Unsupported RNN layer type {type(wrapped_layer).__name__}.") + + return RNNConfig( + units=wrapped_layer.units, + return_sequences=wrapped_layer.return_sequences, + go_backwards=wrapped_layer.go_backwards if not bidirectional else False, + dropout=wrapped_layer.dropout, + recurrent_dropout=wrapped_layer.recurrent_dropout, + rnn_type=rnn_type, + bidirectional=bidirectional + )
+ + +
+[docs]
+    def parse_pooling(self, layer: Union[tf.keras.layers.MaxPooling2D,
+                                         tf.keras.layers.AveragePooling2D]) -> Pooling2DConfig:
+        """
+        Parse a Pooling layer into a Pooling2DConfig dataclass.
+
+        Parameters
+        ----------
+        layer : tf.keras.layers.MaxPooling2D or tf.keras.layers.AveragePooling2D
+            The Pooling layer to parse.
+
+        Returns
+        -------
+        Pooling2DConfig
+            The configuration for the Pooling layer.
+        """
+        # Infer the pooling type from the layer class; the layer_parsers
+        # dispatch in parse_model calls every parser with the layer as its
+        # only argument, so pool_type cannot be a required parameter here.
+        pool_type = 'max' if isinstance(layer, tf.keras.layers.MaxPooling2D) else 'average'
+        return Pooling2DConfig(
+            pool_type=pool_type,
+            pool_size=layer.pool_size,
+            strides=layer.strides if layer.strides else layer.pool_size
+        )
+ + +
+[docs]
+    def parse_batchnorm(self, layer: tf.keras.layers.BatchNormalization) -> str:
+        """
+        Parse a BatchNormalization layer.
+        Since BatchNormalization needs no configuration beyond the spec token,
+        return 'Bn' directly.
+
+        Parameters
+        ----------
+        layer : tf.keras.layers.BatchNormalization
+            The BatchNormalization layer to parse.
+
+        Returns
+        -------
+        str
+            The VGSL spec token 'Bn'.
+        """
+        return "Bn"
+ + +
+[docs] + def parse_dropout(self, layer: tf.keras.layers.Dropout) -> DropoutConfig: + """ + Parse a Dropout layer into a DropoutConfig dataclass. + + Parameters + ---------- + layer : tf.keras.layers.Dropout + The Dropout layer to parse. + + Returns + ------- + DropoutConfig + The configuration for the Dropout layer. + """ + return DropoutConfig( + rate=layer.rate + )
+ + +
+[docs]
+    def parse_flatten(self, layer: tf.keras.layers.Flatten) -> str:
+        """
+        Parse a Flatten layer.
+        Since Flatten needs no configuration beyond the spec token,
+        return 'Flt' directly.
+
+        Parameters
+        ----------
+        layer : tf.keras.layers.Flatten
+            The Flatten layer to parse.
+
+        Returns
+        -------
+        str
+            The VGSL spec token 'Flt'.
+        """
+        return "Flt"
+ + +
+[docs] + def parse_reshape(self, layer: tf.keras.layers.Reshape) -> ReshapeConfig: + """ + Parse a Reshape layer into a ReshapeConfig dataclass. + + Parameters + ---------- + layer : tf.keras.layers.Reshape + The Reshape layer to parse. + + Returns + ------- + ReshapeConfig + The configuration for the Reshape layer. + """ + target_shape = layer.target_shape + return ReshapeConfig( + target_shape=target_shape + )
+ + +
+[docs] + def parse_activation(self, layer: tf.keras.layers.Activation) -> ActivationConfig: + """ + Parse an Activation layer. + + Parameters + ---------- + layer : tf.keras.layers.Activation + The Activation layer to parse. + + Returns + ------- + ActivationConfig + The configuration for the Activation layer. + """ + activation = self._extract_activation(layer) + return ActivationConfig(activation=activation)
+ + + + # Helper methods + def _extract_activation(self, layer: tf.keras.layers.Layer) -> str: + """ + Extract the activation function from a TensorFlow Keras layer. + + Parameters + ---------- + layer : tf.keras.layers.Layer + The layer from which to extract the activation. + + Returns + ------- + str + The activation function name. + """ + if hasattr(layer, 'activation') and callable(layer.activation): + activation = layer.activation.__name__ + elif isinstance(layer, tf.keras.layers.Activation): + activation = layer.activation.__name__ + else: + activation = 'linear' + return activation
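+
+# --- Usage sketch (illustrative, not part of the library source) ---
+# How TensorFlowModelParser might be applied to a small Sequential model;
+# the layer choices below are assumptions for demonstration only.
+if __name__ == "__main__":
+    demo_model = tf.keras.Sequential([
+        tf.keras.layers.Input(shape=(28, 28, 1)),
+        tf.keras.layers.Conv2D(32, (3, 3), activation="relu"),
+        tf.keras.layers.MaxPooling2D((2, 2), strides=(2, 2)),
+        tf.keras.layers.Flatten(),
+        tf.keras.layers.Dense(10, activation="softmax"),
+    ])
+    print(TensorFlowModelParser().parse_model(demo_model))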
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/parsers/torch_parser.html b/_modules/vgslify/parsers/torch_parser.html new file mode 100644 index 0000000..2890152 --- /dev/null +++ b/_modules/vgslify/parsers/torch_parser.html @@ -0,0 +1,518 @@ + + + + + + + + vgslify.parsers.torch_parser — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.parsers.torch_parser

+# Imports
+
+# > Standard Library
+from typing import Callable, Dict, Type, Union
+import warnings
+
+# > Third-Party Dependencies
+import torch.nn as nn
+
+# > Internal
+from vgslify.core.config import (
+    ActivationConfig,
+    Conv2DConfig,
+    Pooling2DConfig,
+    DenseConfig,
+    RNNConfig,
+    DropoutConfig,
+    ReshapeConfig,
+    InputConfig
+)
+from vgslify.parsers.base_parser import BaseModelParser
+from vgslify.torch.layers import Reshape
+
+
+[docs] +class TorchModelParser(BaseModelParser): + """ + Parser for converting PyTorch models into VGSL (Variable-size Graph Specification Language) spec strings. + + This class extends the BaseModelParser to provide specific functionality for PyTorch models. + It uses configuration dataclasses to represent different layer types and converts them into + VGSL spec strings. + + Attributes + ---------- + layer_parsers : Dict[Type[nn.Module], Callable] + A dictionary mapping PyTorch layer types to their corresponding parsing methods. + + Notes + ----- + This parser supports a wide range of PyTorch layers and can be extended to support + additional layer types by adding new parsing methods and updating the layer_parsers dictionary. + """ + + def __init__(self): + # Initialize the layer parsers mapping + self.layer_parsers: Dict[Type[nn.Module], Callable] = { + nn.Conv2d: self.parse_conv2d, + nn.Linear: self.parse_dense, + nn.LSTM: self.parse_rnn, + nn.GRU: self.parse_rnn, + nn.MaxPool2d: self.parse_pooling, + nn.AvgPool2d: self.parse_pooling, + nn.BatchNorm2d: self.parse_batchnorm, + nn.Dropout: self.parse_dropout, + nn.Flatten: self.parse_flatten, + nn.ReLU: self.parse_activation, + nn.Sigmoid: self.parse_activation, + nn.Tanh: self.parse_activation, + nn.Identity: self.parse_activation, + nn.Softmax: self.parse_activation, + Reshape: self.parse_reshape, + } + +
+[docs] + def parse_model(self, model: nn.Module) -> str: + """ + Parse a PyTorch model into a VGSL spec string. + + Parameters + ---------- + model : nn.Module + PyTorch model to be converted. + + Returns + ------- + str + VGSL spec string. + + Raises + ------ + ValueError + If the model contains unsupported layers or if the input shape is invalid. + """ + configs = [] + + # Extract input shape from the first layer + first_layer = next(model.children()) + input_config = self.parse_input(first_layer) + if input_config: + configs.append(input_config) + + # Iterate through all layers in the model + for name, layer in model.named_modules(): + if isinstance(layer, nn.Sequential): + continue + + layer_type = type(layer) + parser_func = self.layer_parsers.get(layer_type, None) + + if parser_func: + # Parse the layer + config = parser_func(layer) + if isinstance(config, ReshapeConfig) or config == "Flt": + warnings.warn("Warning: The model contains a Flatten or Reshape layer. This may cause VGSLify to " + "misinterpret the model's input shape. It is recommended to manually verify and " + "adjust the input shape if necessary to ensure accuracy.") + + # Append the config if not None + if config: + configs.append(config) + else: + raise ValueError( + f"Unsupported layer type {layer_type.__name__} at {name}." + ) + + # Generate VGSL spec string from configs + return self.generate_vgsl(configs)
+ + +
+[docs] + def parse_input(self, layer: nn.Module) -> InputConfig: + """ + Parse the input shape from the first layer of the model. + + Parameters + ---------- + layer : nn.Module + The first layer of the PyTorch model. + + Returns + ------- + InputConfig + The configuration for the input layer. + + Raises + ------ + ValueError + If the input shape cannot be determined. + """ + batch_size = None # Placeholder for dynamic batch size + depth, height, width, channels = -1, -1, -1, -1 + + if hasattr(layer, 'in_channels'): + # Conv2d, Conv3d, BatchNorm2d, etc. + channels = layer.in_channels + elif hasattr(layer, 'in_features'): + # Linear, LSTM, GRU, etc. + channels = layer.in_features + elif hasattr(layer, 'input_size'): + # Some RNN layers + channels = layer.input_size + elif hasattr(layer, 'num_features'): + # Some normalization layers + channels = layer.num_features + + # Try to infer spatial dimensions if available + if isinstance(layer, (nn.Conv2d, nn.BatchNorm2d, nn.MaxPool2d, nn.AvgPool2d)): + height, width = None, None + elif isinstance(layer, (nn.Conv3d, nn.BatchNorm3d, nn.MaxPool3d, nn.AvgPool3d)): + depth, height, width = None, None, None + elif isinstance(layer, (nn.Linear, nn.LSTM, nn.GRU)): + depth, height, width = None, None, None + + if channels == -1: + raise ValueError("Unable to determine input shape from the first layer.") + + return InputConfig( + batch_size=batch_size, + depth=depth, + height=height, + width=width, + channels=channels + )
+ + + # Parser methods for different layer types +
+[docs] + def parse_conv2d(self, layer: nn.Conv2d) -> Conv2DConfig: + """ + Parse a Conv2d layer into a Conv2DConfig dataclass. + + Parameters + ---------- + layer : nn.Conv2d + The Conv2d layer to parse. + + Returns + ------- + Conv2DConfig + The configuration for the Conv2D layer. + """ + return Conv2DConfig( + activation="linear", # PyTorch typically separates activation + kernel_size=layer.kernel_size, + strides=layer.stride, + filters=layer.out_channels + )
+ + +
+[docs] + def parse_dense(self, layer: nn.Linear) -> DenseConfig: + """ + Parse a Linear layer into a DenseConfig dataclass. + + Parameters + ---------- + layer : nn.Linear + The Linear layer to parse. + + Returns + ------- + DenseConfig + The configuration for the Dense layer. + """ + return DenseConfig( + activation="linear", # PyTorch typically separates activation + units=layer.out_features + )
+ + +
+[docs] + def parse_rnn(self, layer: Union[nn.LSTM, nn.GRU]) -> RNNConfig: + """ + Parse an RNN layer (LSTM or GRU) into an RNNConfig dataclass. + + Parameters + ---------- + layer : Union[nn.LSTM, nn.GRU] + The RNN layer to parse. + + Returns + ------- + RNNConfig + The configuration for the RNN layer. + """ + if isinstance(layer, nn.LSTM): + rnn_type = 'lstm' + elif isinstance(layer, nn.GRU): + rnn_type = 'gru' + else: + raise ValueError(f"Unsupported RNN layer type {type(layer).__name__}.") + + return RNNConfig( + units=layer.hidden_size, + return_sequences=True, # PyTorch RNNs always return sequences by default + go_backwards=False, # PyTorch doesn't have a direct equivalent + dropout=layer.dropout, + recurrent_dropout=0, # PyTorch doesn't have recurrent dropout + rnn_type=rnn_type, + bidirectional=layer.bidirectional + )
+ + +
+[docs] + def parse_pooling(self, layer: Union[nn.MaxPool2d, nn.AvgPool2d]) -> Pooling2DConfig: + """ + Parse a Pooling layer into a Pooling2DConfig dataclass. + + Parameters + ---------- + layer : nn.MaxPool2d or nn.AvgPool2d + The Pooling layer to parse. + + Returns + ------- + Pooling2DConfig + The configuration for the Pooling layer. + """ + if isinstance(layer, nn.MaxPool2d): + pool_type = "max" + elif isinstance(layer, nn.AvgPool2d): + pool_type = "average" + + return Pooling2DConfig( + pool_type=pool_type, + pool_size=layer.kernel_size, + strides=layer.stride + )
+ + +
+[docs] + def parse_batchnorm(self, layer: nn.BatchNorm2d) -> str: + """ + Parse a BatchNorm2d layer. + + Parameters + ---------- + layer : nn.BatchNorm2d + The BatchNorm2d layer to parse. + + Returns + ------- + str + Indicates that the VGSL spec should include 'Bn'. + """ + return "Bn"
+ + +
+[docs] + def parse_dropout(self, layer: nn.Dropout) -> DropoutConfig: + """ + Parse a Dropout layer into a DropoutConfig dataclass. + + Parameters + ---------- + layer : nn.Dropout + The Dropout layer to parse. + + Returns + ------- + DropoutConfig + The configuration for the Dropout layer. + """ + return DropoutConfig( + rate=layer.p + )
+ + +
+[docs] + def parse_flatten(self, layer: nn.Flatten) -> str: + """ + Parse a Flatten layer. + + Parameters + ---------- + layer : nn.Flatten + The Flatten layer to parse. + + Returns + ------- + str + Indicates that the VGSL spec should include 'Flatten'. + """ + return "Flt"
+ + +
+[docs] + def parse_reshape(self, layer: Reshape) -> ReshapeConfig: + """ + Parse a Reshape layer into a ReshapeConfig dataclass. + + Parameters + ---------- + layer : Reshape + The custom Reshape layer to parse. + + Returns + ------- + ReshapeConfig + The configuration for the Reshape layer. + """ + target_shape = layer.target_shape + return ReshapeConfig( + target_shape=target_shape + )
+ + +
+[docs] + def parse_activation(self, layer: nn.Module) -> ActivationConfig: + """ + Parse an activation function. + + Parameters + ---------- + layer : nn.Module + The activation layer to parse. + + Returns + ------- + ActivationConfig + The configuration for the Activation layer. + """ + if isinstance(layer, nn.ReLU): + activation = "relu" + elif isinstance(layer, nn.Sigmoid): + activation = "sigmoid" + elif isinstance(layer, nn.Tanh): + activation = "tanh" + elif isinstance(layer, nn.Identity): + activation = "linear" + elif isinstance(layer, nn.Softmax): + activation = "softmax" + else: + activation = "linear" + + return ActivationConfig(activation=activation)
+
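+
+# --- Usage sketch (illustrative, not part of the library source) ---
+# How TorchModelParser might be applied to a small nn.Sequential model;
+# the layer choices below are assumptions for demonstration only.
+if __name__ == "__main__":
+    demo_model = nn.Sequential(
+        nn.Conv2d(in_channels=1, out_channels=32, kernel_size=3),
+        nn.ReLU(),
+        nn.MaxPool2d(kernel_size=2, stride=2),
+    )
+    print(TorchModelParser().parse_model(demo_model))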
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/tensorflow/layers.html b/_modules/vgslify/tensorflow/layers.html new file mode 100644 index 0000000..c98308a --- /dev/null +++ b/_modules/vgslify/tensorflow/layers.html @@ -0,0 +1,404 @@ + + + + + + + + vgslify.tensorflow.layers — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.tensorflow.layers

+# Imports
+
+# > Standard library
+from typing import Tuple
+
+# > Third-party dependencies
+import tensorflow as tf
+
+# > Internal dependencies
+from vgslify.core.factory import LayerFactory
+from vgslify.core.config import (Conv2DConfig, Pooling2DConfig, DenseConfig,
+                                 RNNConfig, DropoutConfig, ReshapeConfig,
+                                 InputConfig)
+
+
+
+[docs] +class TensorFlowLayerFactory(LayerFactory): + """ + TensorFlowLayerFactory is responsible for creating TensorFlow-specific layers based on parsed + VGSL (Variable-size Graph Specification Language) specifications. + + This factory handles the creation of various types of layers, including convolutional layers, + pooling layers, RNN layers, dense layers, activation layers, and more. + + Attributes + ---------- + layers : list + A list of TensorFlow layers that have been added to the factory. + shape : tuple of int + The current shape of the tensor, excluding the batch size. + _input_shape : tuple of int or None + The original input shape provided during initialization. + """ + + def __init__(self, input_shape: Tuple[int, ...] = None): + """ + Initialize the TensorFlowLayerFactory. + + Parameters + ---------- + input_shape : tuple of int, optional + The input shape for the model, excluding batch size. + """ + super().__init__(input_shape, data_format='channels_last') + +
+[docs] + def build(self, name: str = "VGSL_Model") -> tf.keras.models.Model: + """ + Build the final model using the accumulated layers. + + Parameters + ---------- + name : str, optional + The name of the model, by default "VGSL_Model" + + Returns + ------- + tf.keras.models.Model + The constructed TensorFlow model. + + Raises + ------ + ValueError + If no layers have been added to the model. + ValueError + If no input shape has been specified for the model. + """ + if not self.layers: + raise ValueError("No layers added to the model.") + if not self._input_shape: + raise ValueError("No input shape specified for the model.") + + # If we do not have an input layer, add one + if not isinstance(self.layers[0], tf.keras.KerasTensor): + input_layer = tf.keras.Input(shape=self._input_shape) + self.layers.insert(0, input_layer) + + inputs = self.layers[0] + outputs = inputs + for layer in self.layers[1:]: + outputs = layer(outputs) + model = tf.keras.models.Model( + inputs=inputs, outputs=outputs, name=name) + return model
+ + + # Layer creation methods + def _input(self, config: InputConfig, input_shape: Tuple[int, ...]): + """ + Create a TensorFlow Input layer. + + Parameters + ---------- + config : InputConfig + Configuration object for the Input layer. + input_shape : tuple of int + The input shape for the layer. + + Returns + ------- + tf.keras.layers.Input + The created Input layer. + """ + return tf.keras.Input(shape=input_shape, batch_size=config.batch_size) + + def _conv2d(self, config: Conv2DConfig): + """ + Create a TensorFlow Conv2D layer. + + Parameters + ---------- + config : Conv2DConfig + Configuration object for the Conv2D layer. + + Returns + ------- + tf.keras.layers.Conv2D + The created Conv2D layer. + """ + return tf.keras.layers.Conv2D( + filters=config.filters, + kernel_size=config.kernel_size, + strides=config.strides, + padding='same', + activation=None + ) + + def _pooling2d(self, config: Pooling2DConfig): + """ + Create a TensorFlow Pooling2D layer. + + Parameters + ---------- + config : Pooling2DConfig + Configuration object for the Pooling2D layer. + + Returns + ------- + tf.keras.layers.Layer + The created Pooling2D layer (either MaxPooling2D or AveragePooling2D). + """ + if config.pool_type == 'max': + return tf.keras.layers.MaxPooling2D( + pool_size=config.pool_size, + strides=config.strides, + padding='same' + ) + if config.pool_type == 'avg': + return tf.keras.layers.AveragePooling2D( + pool_size=config.pool_size, + strides=config.strides, + padding='same' + ) + + def _dense(self, config: DenseConfig): + """ + Create a TensorFlow Dense layer. + + Parameters + ---------- + config : DenseConfig + Configuration object for the Dense layer. + + Returns + ------- + tf.keras.layers.Dense + The created Dense layer. + """ + return tf.keras.layers.Dense( + units=config.units, + activation=None + ) + + def _rnn(self, config: RNNConfig): + """ + Create a TensorFlow RNN layer (LSTM or GRU), either unidirectional or bidirectional. + + Parameters + ---------- + config : RNNConfig + Configuration object for the RNN layer. + + Returns + ------- + tf.keras.layers.Layer + The created RNN layer (either LSTM or GRU, unidirectional or bidirectional). + + Raises + ------ + ValueError + If an unsupported RNN type is specified. + """ + if config.rnn_type.upper() == 'L': + rnn_class = tf.keras.layers.LSTM + elif config.rnn_type.upper() == 'G': + rnn_class = tf.keras.layers.GRU + else: + raise ValueError(f"Unsupported RNN type: {config.rnn_type}") + + rnn_layer = rnn_class( + units=config.units, + return_sequences=config.return_sequences, + dropout=config.dropout, + recurrent_dropout=config.recurrent_dropout + ) + + if config.bidirectional: + return tf.keras.layers.Bidirectional( + rnn_layer, + merge_mode='concat' + ) + else: + return rnn_layer + + def _batchnorm(self): + """ + Create a TensorFlow BatchNormalization layer. + + Returns + ------- + tf.keras.layers.BatchNormalization + The created BatchNormalization layer. + """ + return tf.keras.layers.BatchNormalization() + + def _dropout(self, config: DropoutConfig): + """ + Create a TensorFlow Dropout layer. + + Parameters + ---------- + config : DropoutConfig + Configuration object for the Dropout layer. + + Returns + ------- + tf.keras.layers.Dropout + The created Dropout layer. + """ + return tf.keras.layers.Dropout(rate=config.rate) + + def _activation(self, activation_function: str): + """ + Create a TensorFlow activation layer. + + Parameters + ---------- + activation_function : str + Name of the activation function. 
+ + Returns + ------- + tf.keras.layers.Activation + The created activation layer. + """ + return tf.keras.layers.Activation(activation=activation_function) + + def _reshape(self, config: ReshapeConfig): + """ + Create a TensorFlow Reshape layer. + + Parameters + ---------- + config : ReshapeConfig + Configuration object for the Reshape layer. + + Returns + ------- + tf.keras.layers.Reshape + The created Reshape layer. + """ + return tf.keras.layers.Reshape(target_shape=config.target_shape) + + def _flatten(self): + """ + Create a TensorFlow Flatten layer. + + Returns + ------- + tf.keras.layers.Flatten + The created Flatten layer. + """ + return tf.keras.layers.Flatten()
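+
+# --- Usage sketch (illustrative, not part of the library source) ---
+# This factory is normally driven by VGSLModelGenerator (see the user
+# guides); a spec-to-model round trip might look like this.
+if __name__ == "__main__":
+    from vgslify.generator import VGSLModelGenerator
+
+    vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+    model = vgsl_gn.generate_model("None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10")
+    model.summary()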
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/torch/layers.html b/_modules/vgslify/torch/layers.html new file mode 100644 index 0000000..71b237c --- /dev/null +++ b/_modules/vgslify/torch/layers.html @@ -0,0 +1,470 @@ + + + + + + + + vgslify.torch.layers — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.torch.layers

+# Imports
+
+# > Standard library
+from typing import Tuple
+
+# > Third-party dependencies
+import torch
+import torch.nn as nn
+
+# > Internal dependencies
+from vgslify.core.factory import LayerFactory
+from vgslify.core.config import (Conv2DConfig, Pooling2DConfig, DenseConfig,
+                                 RNNConfig, DropoutConfig, ReshapeConfig,
+                                 InputConfig)
+from vgslify.torch.reshape import Reshape
+
+
+[docs] +class TorchLayerFactory(LayerFactory): + """ + TorchLayerFactory is responsible for creating PyTorch-specific layers based on parsed + VGSL (Variable-size Graph Specification Language) specifications. + + This factory handles the creation of various types of layers, including convolutional layers, + pooling layers, RNN layers, dense layers, activation layers, and more. + + Attributes + ---------- + layers : list + A list of PyTorch layers that have been added to the factory. + shape : tuple of int + The current shape of the tensor, excluding the batch size. + _input_shape : tuple of int or None + The original input shape provided during initialization. + """ + + def __init__(self, input_shape: Tuple[int, ...] = None): + """ + Initialize the TorchLayerFactory. + + Parameters + ---------- + input_shape : tuple of int, optional + The input shape for the model, excluding batch size. + """ + super().__init__(input_shape, data_format='channels_first') + +
+[docs] + def build(self, name: str = "VGSL_Model") -> nn.Module: + """ + Build the final model using the accumulated layers. + + Parameters + ---------- + name : str, optional + The name of the model, by default "VGSL_Model" + + Returns + ------- + torch.nn.Module + The constructed PyTorch model. + + Raises + ------ + ValueError + If no layers have been added to the model. + ValueError + If no input shape has been specified for the model. + """ + if not self.layers: + raise ValueError("No layers added to the model.") + if not self._input_shape: + raise ValueError("No input shape specified for the model.") + + # model = VGSLModel(self.layers) + # TODO: Implement VGSLModel class + model = nn.Sequential(*self.layers) + model.__class__.__name__ = name + return model
+ + + # Layer creation methods + def _input(self, config: InputConfig, input_shape: Tuple[int, ...]): + """ + Create a PyTorch input layer (placeholder method). + + Parameters + ---------- + config : InputConfig + Configuration object (unused in PyTorch). + input_shape : tuple of int + The input shape for the layer. + + Returns + ------- + None + PyTorch doesn't require a separate input layer. + """ + return None + + def _conv2d(self, config: Conv2DConfig): + """ + Create a PyTorch Conv2d layer. + + Parameters + ---------- + config : Conv2DConfig + Configuration object for the Conv2D layer. + + Returns + ------- + torch.nn.Conv2d + The created Conv2d layer. + """ + padding = 'same' if torch.__version__ >= '1.7' else self._compute_same_padding( + config.kernel_size, config.strides) + return nn.Conv2d( + in_channels=self.shape[0], + out_channels=config.filters, + kernel_size=config.kernel_size, + stride=config.strides, + padding=padding + ) + + def _pooling2d(self, config: Pooling2DConfig): + """ + Create a PyTorch Pooling2d layer. + + Parameters + ---------- + config : Pooling2DConfig + Configuration object for the Pooling2D layer. + + Returns + ------- + torch.nn.Module + The created Pooling2d layer (either MaxPool2d or AvgPool2d). + """ + padding = self._compute_same_padding(config.pool_size, config.strides) + pool_layer = nn.MaxPool2d if config.pool_type == 'max' else nn.AvgPool2d + return pool_layer( + kernel_size=config.pool_size, + stride=config.strides, + padding=padding + ) + + def _dense(self, config: DenseConfig): + """ + Create a PyTorch Linear (Dense) layer. + + Parameters + ---------- + config : DenseConfig + Configuration object for the Dense layer. + + Returns + ------- + torch.nn.Linear + The created Linear layer. + """ + return nn.Linear(self.shape[-1], config.units) + + def _rnn(self, config: RNNConfig): + """ + Create a PyTorch RNN layer (LSTM or GRU), either unidirectional or bidirectional. + + Parameters + ---------- + config : RNNConfig + Configuration object for the RNN layer. + + Returns + ------- + torch.nn.Module + The created RNN layer (either LSTM or GRU, unidirectional or bidirectional). + + Raises + ------ + ValueError + If an unsupported RNN type is specified. + """ + if config.rnn_type.upper() == 'L': + rnn_class = nn.LSTM + elif config.rnn_type.upper() == 'G': + rnn_class = nn.GRU + else: + raise ValueError(f"Unsupported RNN type: {config.rnn_type}") + + return rnn_class( + input_size=self.shape[-1], + hidden_size=config.units, + num_layers=1, + batch_first=True, + dropout=config.dropout, + bidirectional=config.bidirectional + ) + + def _batchnorm(self): + """ + Create a PyTorch BatchNorm layer. + + Returns + ------- + torch.nn.Module + The created BatchNorm layer (either BatchNorm1d or BatchNorm2d). + + Raises + ------ + ValueError + If the input shape is not supported for BatchNorm. + """ + if len(self.shape) == 3: + return nn.BatchNorm2d(self.shape[0]) + elif len(self.shape) == 2: + return nn.BatchNorm1d(self.shape[0]) + else: + raise ValueError("Unsupported input shape for BatchNorm layer.") + + def _dropout(self, config: DropoutConfig): + """ + Create a PyTorch Dropout layer. + + Parameters + ---------- + config : DropoutConfig + Configuration object for the Dropout layer. + + Returns + ------- + nn.Dropout + The created Dropout layer. + """ + return nn.Dropout(p=config.rate) + + def _activation(self, activation_function: str): + """ + Create a PyTorch activation layer. 
+ + Parameters + ---------- + activation_function : str + Name of the activation function. Supported values are 'softmax', 'tanh', 'relu', + 'linear', 'sigmoid'. + + Returns + ------- + nn.Module + The created activation layer. + + Raises + ------ + ValueError + If the activation function is not supported. + """ + activations = { + 'softmax': nn.Softmax(dim=1), + 'tanh': nn.Tanh(), + 'relu': nn.ReLU(), + 'linear': nn.Identity(), + 'sigmoid': nn.Sigmoid(), + } + if activation_function in activations: + return activations[activation_function] + else: + raise ValueError(f"Unsupported activation: {activation_function}") + + def _reshape(self, config: ReshapeConfig): + """ + Create a PyTorch Reshape layer. + + Parameters + ---------- + config : ReshapeConfig + Configuration object for the Reshape layer. + + Returns + ------- + nn.Module + The created Reshape layer. + """ + return Reshape(*config.target_shape) + + def _flatten(self): + """ + Create a PyTorch Flatten layer. + + Returns + ------- + nn.Flatten + The created Flatten layer. + """ + return nn.Flatten() + + # Helper methods + def _compute_same_padding(self, kernel_size, stride): + """ + Compute the padding size to achieve 'same' padding. + + Parameters + ---------- + kernel_size : int or tuple + Size of the kernel. + stride : int or tuple + Stride of the convolution. + + Returns + ------- + tuple + Padding size for height and width dimensions. + """ + if isinstance(kernel_size, int): + kernel_size = (kernel_size, kernel_size) + if isinstance(stride, int): + stride = (stride, stride) + padding = [] + for k, s in zip(kernel_size, stride): + p = ((k - 1) // 2) + padding.append(p) + return tuple(padding) + + def _get_activation_layer(self, activation_name: str): + """ + Return a PyTorch activation layer based on the activation name. + + Parameters + ---------- + activation_name : str + Name of the activation function. + + Returns + ------- + torch.nn.Module + The activation layer. + + Raises + ------ + ValueError + If the activation_name is not recognized. + """ + activations = { + 'softmax': nn.Softmax(dim=1), + 'tanh': nn.Tanh(), + 'relu': nn.ReLU(), + 'linear': nn.Identity(), + 'sigmoid': nn.Sigmoid(), + } + if activation_name in activations: + return activations[activation_name] + else: + raise ValueError(f"Unsupported activation: {activation_name}")
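+
+# --- Usage sketch (illustrative, not part of the library source) ---
+# _compute_same_padding pads each dimension by (k - 1) // 2; note that this
+# only reproduces 'same' output sizes for stride 1 and odd kernel sizes.
+if __name__ == "__main__":
+    factory = TorchLayerFactory(input_shape=(1, 28, 28))
+    print(factory._compute_same_padding(3, 1))       # -> (1, 1)
+    print(factory._compute_same_padding((5, 3), 1))  # -> (2, 1)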
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_modules/vgslify/utils/model_to_spec.html b/_modules/vgslify/utils/model_to_spec.html new file mode 100644 index 0000000..d10d80d --- /dev/null +++ b/_modules/vgslify/utils/model_to_spec.html @@ -0,0 +1,177 @@ + + + + + + + + vgslify.utils.model_to_spec — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +

Source code for vgslify.utils.model_to_spec

+try:
+    import tensorflow as tf
+except ImportError:
+    tf = None
+try:
+    from torch import nn
+except ImportError:
+    nn = None
+
+
+
+[docs]
+def model_to_spec(model) -> str:
+    """
+    Convert a deep learning model (TensorFlow or PyTorch) to a VGSL spec string.
+
+    Parameters
+    ----------
+    model : Model
+        The deep learning model to be converted. Can be a TensorFlow model (tf.keras.models.Model)
+        or a PyTorch model (torch.nn.Module).
+
+    Returns
+    -------
+    str
+        VGSL spec string.
+
+    Raises
+    ------
+    ValueError
+        If the model is not supported or cannot be parsed.
+
+    Examples
+    --------
+    >>> from vgslify.utils import model_to_spec
+    >>> import tensorflow as tf
+    >>> model = tf.keras.models.load_model("path_to_model.h5")
+    >>> spec_string = model_to_spec(model)
+    >>> print(spec_string)
+    """
+    parser = None
+
+    # Check if it's a TensorFlow model
+    if tf and isinstance(model, tf.keras.Model):
+        from vgslify.parsers.tf_parser import TensorFlowModelParser
+        parser = TensorFlowModelParser()
+
+    # Check if it's a PyTorch model
+    elif nn and isinstance(model, nn.Module):
+        from vgslify.parsers.torch_parser import TorchModelParser
+        parser = TorchModelParser()
+
+    # Raise an error if the model is not recognized
+    if parser is None:
+        raise ValueError(
+            f"Unsupported model type: {type(model).__name__}. Expected TensorFlow "
+            "or PyTorch model.")
+
+    return parser.parse_model(model)
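+
+# --- Usage sketch (illustrative, not part of the library source) ---
+# The docstring example covers TensorFlow; a PyTorch module can be passed
+# in exactly the same way when torch is installed.
+if __name__ == "__main__" and nn is not None:
+    demo = nn.Sequential(nn.Conv2d(3, 16, kernel_size=3), nn.ReLU())
+    print(model_to_spec(demo))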
+ +
+ +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/_sources/convert_to_vgsl_spec.rst.txt b/_sources/convert_to_vgsl_spec.rst.txt new file mode 100644 index 0000000..c8cf4ea --- /dev/null +++ b/_sources/convert_to_vgsl_spec.rst.txt @@ -0,0 +1,67 @@ +Converting Models Back to VGSL Spec +=================================== + +VGSLify now includes the ability to convert a trained or existing model back into a VGSL specification string. This functionality is useful for: + +- Sharing model architectures in a concise format. +- Reproducing models from the VGSL spec string. +- Analyzing and understanding complex models via their VGSL representation. + +How It Works +------------ + +After you build or load a model using TensorFlow (with PyTorch support planned), you can convert it back into its VGSL specification string using the `model_to_spec()` function provided by VGSLify. + +Example: Convert a Model to VGSL Spec +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Here’s how you can convert an existing model to its VGSL spec: + +.. code-block:: python + + from vgslify.utils import model_to_spec + from tensorflow.keras.models import load_model + + # Load an existing TensorFlow model (previously saved) + model = load_model("path_to_your_model.keras") + + # Convert the model to VGSL spec + vgsl_spec = model_to_spec(model) + print(vgsl_spec) + +The above example will output the VGSL spec string corresponding to the architecture of the loaded model. + +Saving and Reusing VGSL Spec +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Once you've converted the model to a VGSL spec, you can easily save or share the spec string. This can be reused to rebuild the same model using VGSLify. + +1. **Save the VGSL Spec**: + + - Save the generated VGSL spec string to a file or store it in your project for later use. + +.. code-block:: python + + with open("model_spec.txt", "w") as f: + f.write(vgsl_spec) + +2. **Rebuild the Model from the Spec**: + + - You can use the saved VGSL spec to rebuild the exact same model at any time. + +.. code-block:: python + + from vgslify.generator import VGSLModelGenerator + + # Load the VGSL spec from file + with open("model_spec.txt", "r") as f: + vgsl_spec = f.read() + + # Rebuild the model from the spec + vgsl_gn = VGSLModelGenerator(backend="tensorflow") + model = vgsl_gn.generate_model(vgsl_spec) + model.summary() + +By using this functionality, you can quickly share, reproduce, and analyze deep learning models in a concise format. + + diff --git a/_sources/getting_started.rst.txt b/_sources/getting_started.rst.txt new file mode 100644 index 0000000..91793b6 --- /dev/null +++ b/_sources/getting_started.rst.txt @@ -0,0 +1,98 @@ +Getting Started +=============== + +Overview +-------- + +VGSLify makes it incredibly simple to define, build, and train deep learning models using the Variable-size Graph Specification Language (VGSL). VGSL strings serve as compact representations of neural network architectures, allowing you to build models in a single line. + +VGSLify abstracts the complexity of backend-specific syntax, enabling seamless switching between TensorFlow and, in future releases, PyTorch. This flexibility allows you to focus on model architecture and training without worrying about framework-specific implementations. + +What is a VGSL Specification? +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +A VGSL specification string concisely defines a neural network's architecture. The string encodes all layers, including input, convolutional layers, pooling, fully connected layers, and more. 
Each part of the string corresponds to a different component of the model.
+
+For example, the following VGSL string defines a simple convolutional neural network:
+
+``None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10``
+
+This string represents a model with an input layer, a convolutional layer, a max pooling layer, a reshape layer, a dense (fully connected) layer, dropout, and an output layer. The model's structure is encoded entirely within this single line.
+
+Key functionality of VGSLify includes:
+
+- **Building models with a single line**: You can define complex architectures with a VGSL string, reducing the need for verbose code.
+- **Switching between TensorFlow and PyTorch**: VGSLify supports both TensorFlow and (planned) PyTorch, allowing you to easily switch between backends.
+
+Simple Example: Building a Model
+--------------------------------
+
+Let’s walk through building a simple deep learning model using VGSLify.
+
+1. **Import the VGSLModelGenerator**:
+
+   The `VGSLModelGenerator` class is the core component for building models from VGSL strings. Begin by importing it:
+
+   .. code-block:: python
+
+      from vgslify.generator import VGSLModelGenerator
+
+2. **Define the VGSL Specification String**:
+
+   The VGSL spec string encodes the structure of the model. In this example, we will define a simple convolutional neural network suitable for handling MNIST digit images (28x28 grayscale):
+
+   .. code-block:: python
+
+      vgsl_spec = "None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10"
+
+3. **Build and View the Model**:
+
+   Initialize the `VGSLModelGenerator` and use it to build the model based on the VGSL spec string:
+
+   .. code-block:: python
+
+      vgsl_gn = VGSLModelGenerator(backend="tensorflow")  # Set backend to TensorFlow
+      model = vgsl_gn.generate_model(vgsl_spec)
+      model.summary()  # View the model architecture
+
+   This will generate the model and display a summary of its architecture, including all layers defined by the VGSL spec string.
+
+Explanation of Layers
+---------------------
+
+Let’s break down the layers defined by the VGSL specification string in our example:
+
+- **Input Layer**: ``None,28,28,1``
+
+  - This defines the input shape of the model, which corresponds to grayscale images of size 28x28 pixels. The first dimension (`None`) allows for a variable batch size.
+
+- **Convolutional Layer**: ``Cr3,3,32``
+
+  - This adds a 2D convolutional layer with a 3x3 kernel and 32 output filters, using ReLU activation (`r` for ReLU).
+
+- **MaxPooling Layer**: ``Mp2,2,2,2``
+
+  - This reduces the spatial dimensions by applying 2x2 max pooling with a stride of 2x2, which downsamples the input by taking the maximum value over each 2x2 window.
+
+- **Reshape Layer**: ``Rc2``
+
+  - Reshapes the output from the previous layer, collapsing the spatial dimensions into a single vector suitable for fully connected layers.
+
+- **Fully Connected Layer**: ``Fr64``
+
+  - Adds a fully connected layer (dense layer) with 64 units and ReLU activation.
+
+- **Dropout Layer**: ``D20``
+
+  - Applies dropout with a 20% rate to prevent overfitting by randomly setting a portion of the inputs to zero during training.
+
+- **Output Layer**: ``Fs10``
+
+  - Represents the output layer with 10 units (for 10 classes, such as the digits in MNIST) using softmax activation.
+
+This VGSL string provides a concise, human-readable format for specifying complex model architectures. VGSLify automatically translates this specification into a deep learning model that can be trained using TensorFlow.
+
+ +Next Steps +---------- + +Once you’ve built and explored a basic model, you can dive deeper into VGSLify's capabilities. Follow the [tutorials](tutorials.html) to explore more advanced use cases such as: + +- Using different VGSL spec strings to define custom architectures. +- Switching between TensorFlow and PyTorch backends (PyTorch support coming soon). +- Integrating VGSLify models into larger deep learning workflows. + +Check out the `API reference `_ for detailed information on all available classes, methods, and utilities in VGSLify. + diff --git a/_sources/index.rst.txt b/_sources/index.rst.txt new file mode 100644 index 0000000..44dc5ff --- /dev/null +++ b/_sources/index.rst.txt @@ -0,0 +1,53 @@ +VGSLify Documentation +===================== + +`vgslify` is a powerful tool for creating and managing models using the Variable-size Graph Specification Language (VGSL). It offers flexibility by supporting both TensorFlow and (planned) PyTorch backends, making it suitable for various deep learning workflows. + +This documentation provides a comprehensive guide to getting started with VGSLify, in-depth tutorials, and detailed API references for developers. + +Getting Started +--------------- +New to VGSLify? Follow these guides to install and start building models with VGSL: + +.. toctree:: + :maxdepth: 2 + :caption: Getting Started: + + introduction + installation + getting_started + tutorials + +User Guides +----------- +Explore advanced usage, examples, and details on the VGSL specification: + +.. toctree:: + :maxdepth: 2 + :caption: User Guides: + + supported_layers + convert_to_vgsl_spec + +API Reference +------------- +For developers looking for more technical details, the following API documentation will help you understand and use the library: + +.. toctree:: + :maxdepth: 2 + :caption: API Reference: + + source/vgslify + source/vgslify.core + source/vgslify.parser + source/vgslify.utils + source/vgslify.tensorflow + source/vgslify.torch + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/_sources/installation.rst.txt b/_sources/installation.rst.txt new file mode 100644 index 0000000..12571a0 --- /dev/null +++ b/_sources/installation.rst.txt @@ -0,0 +1,66 @@ +Installation +============ + +Prerequisites +------------- + +Before installing VGSLify, make sure your system meets the following requirements: + +- **Python Version**: VGSLify requires Python 3.8 or newer. Ensure that you have the correct version installed by running the following command: + + .. code-block:: bash + + python --version + +- **Required Packages**: + + - `pip`: Python's package manager is required to install VGSLify and its dependencies. + - **TensorFlow**: If you are using VGSLify with TensorFlow, you will need to install TensorFlow as a backend. + - **PyTorch**: PyTorch support is planned for future releases, but is not yet available in the current version. + +- **VGSLify is BYOB (Bring Your Own Backend)**: VGSLify itself does not include a deep learning framework. Users must install their preferred backend—TensorFlow for now, with PyTorch support planned in future releases. This approach gives you flexibility in choosing your backend. + +Installing VGSLify +------------------ + +You can install VGSLify in several ways, depending on whether you want the stable release or a development version. + +1. **Install the latest version via pip**: + + The easiest way to get VGSLify is by using `pip`. Run the following command in your terminal: + + .. 
code-block:: bash + + pip install vgslify + +2. **Install TensorFlow Backend**: + + VGSLify is a BYOB package, which means you will need to install a backend separately. If you want to use TensorFlow as the backend, you can install it with the following command: + + .. code-block:: bash + + pip install tensorflow + +3. **Install the Development Version from Source**: + + If you want to work with the development version or modify VGSLify, you can install it directly from the source repository. Follow these steps: + + .. code-block:: bash + + git clone https://github.com/TimKoornstra/vgslify.git + cd vgslify + pip install . + + This will install VGSLify and all of its dependencies in your environment. + +Verifying Installation +---------------------- + +After installation, you can verify that VGSLify has been successfully installed and is functioning correctly by running the following command: + +.. code-block:: bash + + python -c "import vgslify; print(vgslify.__version__)" + +This should print the installed version of VGSLify without any errors. If the version is displayed correctly, the installation is successful. + diff --git a/_sources/introduction.rst.txt b/_sources/introduction.rst.txt new file mode 100644 index 0000000..0da5f20 --- /dev/null +++ b/_sources/introduction.rst.txt @@ -0,0 +1,51 @@ +Introduction +============ + +Overview of VGSLify +------------------- + +VGSLify is a toolkit designed to simplify the creation, training, and interpretation of deep learning models through the use of the Variable-size Graph Specification Language (VGSL). VGSL, originally developed for Tesseract OCR, provides a compact and flexible way to define neural network architectures in string format. VGSLify builds on this idea and adds support for modern deep learning frameworks like TensorFlow (with PyTorch planned for future versions), offering a user-friendly interface to create and manage neural network models. + +What is VGSLify? +^^^^^^^^^^^^^^^^ + +VGSLify leverages the power of VGSL to let users define neural networks using simple, compact strings that specify layers, their configurations, and connections. This approach eliminates the need for verbose and complex code when defining model architectures, making it easier to iterate on design, experimentation, and deployment. With VGSLify, you can quickly prototype models and convert between VGSL strings and executable code in deep learning frameworks. + +VGSLify abstracts away the complexities of framework-specific syntax, allowing users to focus on model architecture and training. By supporting both TensorFlow and PyTorch (planned), it ensures flexibility for users who might prefer one framework over the other. + +Key Features +^^^^^^^^^^^^ + +VGSLify offers several key features to help streamline the process of deep learning model development: + +- **Supports TensorFlow and (planned) PyTorch backends**: VGSLify currently works with TensorFlow, with PyTorch support planned in future releases. + +- **Flexible model specification with VGSL**: VGSL is a compact language that allows for the definition of models with just a string, simplifying architecture description. Users can specify layers, input shapes, activations, and more in a single line. + +- **Easy conversion between VGSL specs and code**: VGSLify offers utilities to convert VGSL strings into fully functional TensorFlow models, making it easy to go from abstract model definitions to trainable models. 
It also includes tools for converting trained models back into VGSL spec strings for easy sharing and reproduction. + +Target Audience +^^^^^^^^^^^^^^^ + +VGSLify is aimed at data scientists, researchers, and developers who need a concise and flexible way to define, experiment with, and manage deep learning models. Whether you're a beginner looking for an easier way to get started with neural networks or an experienced developer seeking a faster way to prototype architectures, VGSLify provides a powerful and intuitive toolset. + +Why Use VGSLify? +---------------- + +VGSLify is designed to streamline the model creation process, helping users avoid common pain points in deep learning development: + +- **Reduces boilerplate code for defining models**: Instead of writing hundreds of lines of code to define your architecture, VGSLify allows you to express it in a single string. + +- **Streamlines model design, training, and evaluation**: The compact VGSL string format makes it easy to modify architectures, test different configurations, and train models without needing to refactor large amounts of code. + +- **Facilitates collaboration and reproducibility**: VGSLify allows users to share models in a concise, human-readable format, making it easier to reproduce results across different machines or by different users. + +Links to Documentation +---------------------- + +To get started with VGSLify or dive deeper into its capabilities, explore the following resources: + +- `Quick Start Guide `_: Learn how to quickly set up VGSLify, generate models, and begin training. +- `Tutorials `_: Step-by-step tutorials to guide you through common tasks and advanced features. +- `API Reference `_: Detailed documentation of VGSLify's classes, methods, and utilities. + diff --git a/_sources/source/modules.rst.txt b/_sources/source/modules.rst.txt new file mode 100644 index 0000000..800a621 --- /dev/null +++ b/_sources/source/modules.rst.txt @@ -0,0 +1,7 @@ +vgslify +======= + +.. toctree:: + :maxdepth: 4 + + vgslify diff --git a/_sources/source/vgslify.core.rst.txt b/_sources/source/vgslify.core.rst.txt new file mode 100644 index 0000000..9b8a506 --- /dev/null +++ b/_sources/source/vgslify.core.rst.txt @@ -0,0 +1,45 @@ +vgslify.core package +==================== + +Submodules +---------- + +vgslify.core.config module +-------------------------- + +.. automodule:: vgslify.core.config + :members: + :undoc-members: + :show-inheritance: + +vgslify.core.factory module +--------------------------- + +.. automodule:: vgslify.core.factory + :members: + :undoc-members: + :show-inheritance: + +vgslify.core.parser module +-------------------------- + +.. automodule:: vgslify.core.parser + :members: + :undoc-members: + :show-inheritance: + +vgslify.core.utils module +------------------------- + +.. automodule:: vgslify.core.utils + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: vgslify.core + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/source/vgslify.parser.rst.txt b/_sources/source/vgslify.parser.rst.txt new file mode 100644 index 0000000..303f687 --- /dev/null +++ b/_sources/source/vgslify.parser.rst.txt @@ -0,0 +1,21 @@ +vgslify.parser package +====================== + +Submodules +---------- + +vgslify.parser.tf\_parser module +-------------------------------- + +.. automodule:: vgslify.parser.tf_parser + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: vgslify.parser + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/source/vgslify.parsers.rst.txt b/_sources/source/vgslify.parsers.rst.txt new file mode 100644 index 0000000..327d545 --- /dev/null +++ b/_sources/source/vgslify.parsers.rst.txt @@ -0,0 +1,37 @@ +vgslify.parsers package +======================= + +Submodules +---------- + +vgslify.parsers.base\_parser module +----------------------------------- + +.. automodule:: vgslify.parsers.base_parser + :members: + :undoc-members: + :show-inheritance: + +vgslify.parsers.tf\_parser module +--------------------------------- + +.. automodule:: vgslify.parsers.tf_parser + :members: + :undoc-members: + :show-inheritance: + +vgslify.parsers.torch\_parser module +------------------------------------ + +.. automodule:: vgslify.parsers.torch_parser + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: vgslify.parsers + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/source/vgslify.rst.txt b/_sources/source/vgslify.rst.txt new file mode 100644 index 0000000..a5ddcc4 --- /dev/null +++ b/_sources/source/vgslify.rst.txt @@ -0,0 +1,38 @@ +vgslify package +=============== + +Subpackages +----------- + +.. toctree:: + :maxdepth: 4 + + vgslify.core + vgslify.tensorflow + +Submodules +---------- + +vgslify.generator module +------------------------ + +.. automodule:: vgslify.generator + :members: + :undoc-members: + :show-inheritance: + +vgslify.parser module +--------------------- + +.. automodule:: vgslify.parser + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: vgslify + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/source/vgslify.tensorflow.rst.txt b/_sources/source/vgslify.tensorflow.rst.txt new file mode 100644 index 0000000..4fd6b3c --- /dev/null +++ b/_sources/source/vgslify.tensorflow.rst.txt @@ -0,0 +1,21 @@ +vgslify.tensorflow package +========================== + +Submodules +---------- + +vgslify.tensorflow.layers module +-------------------------------- + +.. automodule:: vgslify.tensorflow.layers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: vgslify.tensorflow + :members: + :undoc-members: + :show-inheritance: diff --git a/_sources/source/vgslify.torch.rst.txt b/_sources/source/vgslify.torch.rst.txt new file mode 100644 index 0000000..923d27c --- /dev/null +++ b/_sources/source/vgslify.torch.rst.txt @@ -0,0 +1,22 @@ +vgslify.torch package +========================== + +Submodules +---------- + +vgslify.torch.layers module +-------------------------------- + +.. automodule:: vgslify.torch.layers + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. automodule:: vgslify.torch + :members: + :undoc-members: + :show-inheritance: + diff --git a/_sources/source/vgslify.utils.rst.txt b/_sources/source/vgslify.utils.rst.txt new file mode 100644 index 0000000..28c6c4e --- /dev/null +++ b/_sources/source/vgslify.utils.rst.txt @@ -0,0 +1,21 @@ +vgslify.utils package +===================== + +Submodules +---------- + +vgslify.utils.model\_to\_spec module +------------------------------------ + +.. automodule:: vgslify.utils.model_to_spec + :members: + :undoc-members: + :show-inheritance: + +Module contents +--------------- + +.. 
automodule:: vgslify.utils
+   :members:
+   :undoc-members:
+   :show-inheritance:
diff --git a/_sources/supported_layers.rst.txt b/_sources/supported_layers.rst.txt
new file mode 100644
index 0000000..b6534c9
--- /dev/null
+++ b/_sources/supported_layers.rst.txt
@@ -0,0 +1,121 @@
+Supported Layers
+================
+
+VGSLify supports a range of layers that can be specified using the VGSL format. Each layer type has its own configuration format, allowing you to define models concisely and flexibly. This section provides an overview of the supported layers and their VGSL specifications.
+
+Layer Specifications
+--------------------
+
+**Input Layer**
+^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `<batch_size>,<height>,<width>[,<depth>,<channels>]`
+- **Description**: Defines the input shape for the model, where the first value is the batch size (set to `None` for variable), followed by the height, width, and optionally the depth and channels.
+- **Example**: `None,28,28,1`
+
+  - Defines an input layer with variable batch size, height and width of 28, and 1 channel (e.g., for grayscale images).
+
+**Conv2D Layer**
+^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `C(s|t|r|l|m)<x>,<y>[,<s_x>,<s_y>],<d>`
+- **Description**: Defines a 2D convolutional layer with a kernel size of `<x>` by `<y>`, optional strides `<s_x>,<s_y>`, and `<d>` filters. Activation functions are specified as follows:
+
+  - `s`: Sigmoid
+  - `t`: Tanh
+  - `r`: ReLU
+  - `l`: Linear
+  - `m`: Softmax
+
+- **Example**: `Cr3,3,32`
+
+  - Adds a convolutional layer with ReLU activation, a 3x3 kernel, default strides (1,1), and 32 filters.
+
+**Pooling2D Layer**
+^^^^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `<p>(<x>,<y>[,<s_x>,<s_y>])`
+
+  - `<p>` is `Mp` for max-pooling, `Ap` for average pooling.
+
+- **Description**: Specifies a pooling operation, which reduces the spatial dimensions by applying a window of `<x>` by `<y>` and strides of `<s_x>,<s_y>`. If strides are not specified, they default to the pool size.
+- **Example**: `Mp2,2,1,1`
+
+  - Defines a max-pooling layer with a pool size of 2x2 and strides of 1x1.
+
+**Dense (Fully Connected) Layer**
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `F(s|t|r|l|m)<d>`
+- **Description**: Defines a fully connected (dense) layer with `<d>` units. The non-linearity can be:
+
+  - `s`: Sigmoid
+  - `t`: Tanh
+  - `r`: ReLU
+  - `l`: Linear
+  - `m`: Softmax
+
+- **Example**: `Fr64`
+
+  - Adds a dense layer with 64 units and ReLU activation.
+
+**RNN Layer (LSTM/GRU/Bidirectional)**
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `L(f|r)[s]<n>[,D<rate>,Rd<rate>]` for LSTM/GRU, `B(g|l)<n>[,D<rate>,Rd<rate>]` for Bidirectional RNN
+- **Description**: Specifies an RNN layer with `<n>` units. The optional dropout `D<rate>` and recurrent dropout `Rd<rate>` rates can be included.
+
+  - `L`: LSTM
+  - `G`: GRU
+  - `B`: Bidirectional
+  - `f`: Forward direction, `r`: Reverse direction, `g`: GRU, `l`: LSTM
+
+- **Example**: `Lf64,D50,Rd25`
+
+  - Defines an LSTM layer with 64 units, 50% dropout, and 25% recurrent dropout.
+
+**Dropout Layer**
+^^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `D<rate>`
+- **Description**: Specifies a dropout layer, where `<rate>` is the dropout percentage (0–100).
+- **Example**: `D50`
+
+  - Adds a dropout layer with a 50% dropout rate.
+
+**Output Layer**
+^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `O(2|1|0)(l|s)<n>`
+- **Description**: Defines the output layer. The first value (`2`, `1`, or `0`) specifies whether the output is 2D, 1D, or scalar, followed by the activation type (`l`: linear, `s`: softmax) and the number of output units `<n>`.
+- **Example**: `O1s10`
+
+  - Defines a softmax output layer with 10 classes for a 1D sequence.
+
+**Reshape Layer**
+^^^^^^^^^^^^^^^^^
+
+- **VGSL Spec**: `Rc2`, `Rc3`, or `R<x>,<y>,<z>`
+- **Description**: The Reshape layer reshapes the output tensor from the previous layer. It has two collapse modes and a direct reshape form:
+
+  - **Rc2**: Collapses the spatial dimensions (height, width, and channels) into a 2D tensor. This is typically used when transitioning to a fully connected (dense) layer.
+
+    - Example: Reshaping from `(batch_size, height, width, channels)` to `(batch_size, height * width * channels)`.
+
+  - **Rc3**: Collapses the spatial dimensions into a 3D tensor of the form `(batch_size, time_steps, features)`, suitable for RNN layers.
+
+    - Example: Reshaping from `(batch_size, height, width, channels)` to `(batch_size, height * width, channels)` for input to LSTM or GRU layers.
+
+  - **R<x>,<y>,<z>**: Directly reshapes to the specified target shape.
+
+- **Example**:
+
+  - `Rc2` collapses the output from `(None, 8, 8, 64)` to `(None, 4096)` for a fully connected layer.
+  - `Rc3` collapses the output from `(None, 8, 8, 64)` to `(None, 64, 64)` for input to an RNN layer.
+  - `R64,64,3` reshapes the output to `(None, 64, 64, 3)`.
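+
+**Combining Layers into a Model**
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A full VGSL model is written as a single space-separated sequence of these layer specs, starting with the input. The sketch below is a minimal, illustrative composition of the layers documented above, using the `VGSLModelGenerator` API shown in the tutorials:
+
+.. code-block:: python
+
+   from vgslify.generator import VGSLModelGenerator
+
+   # Input (grayscale 28x28) -> Conv2D (ReLU, 3x3 kernel, 32 filters) -> MaxPool 2x2
+   # -> collapse to 2D -> Dense (ReLU, 64 units) -> 25% dropout -> 10-class softmax
+   vgsl_spec = "None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D25 Fs10"
+
+   vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+   model = vgsl_gn.generate_model(vgsl_spec)
+   model.summary()
+
+Each token maps one-to-one onto the layer specs above, so a spec string doubles as a compact architecture summary.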
+
+More Examples
+-------------
+
+Explore additional examples and advanced configurations in the `tutorials <tutorials.html>`_.
diff --git a/_sources/tutorials.rst.txt b/_sources/tutorials.rst.txt
new file mode 100644
index 0000000..526be2c
--- /dev/null
+++ b/_sources/tutorials.rst.txt
@@ -0,0 +1,184 @@
+Tutorials
+=========
+
+This section provides hands-on tutorials for using VGSLify to build and train deep learning models. Follow these step-by-step guides to get familiar with how VGSLify simplifies model creation through VGSL specifications.
+
+Tutorial 1: Building a CNN for Image Classification
+---------------------------------------------------
+
+Overview
+~~~~~~~~
+
+In this tutorial, you will build a Convolutional Neural Network (CNN) for image classification using the CIFAR-10 dataset. We will define the model using a VGSL spec string, which allows us to specify the architecture in a concise, human-readable format. By the end of this tutorial, you will have a fully trained CNN model for image classification.
+
+Step-by-Step Instructions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+1. **Import required libraries**:
+
+   Begin by importing the necessary libraries for TensorFlow and VGSLify.
+
+   .. code-block:: python
+
+      import tensorflow as tf
+      from vgslify.generator import VGSLModelGenerator
+
+2. **Load and preprocess the dataset**:
+
+   CIFAR-10 is a dataset of 60,000 32x32 color images in 10 classes, with 6,000 images per class. You can load and preprocess the dataset as follows:
+
+   .. code-block:: python
+
+      (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()
+
+      # Normalize the images to the range [0, 1]
+      x_train, x_test = x_train / 255.0, x_test / 255.0
+
+      # Convert labels to one-hot encoding
+      y_train = tf.keras.utils.to_categorical(y_train, 10)
+      y_test = tf.keras.utils.to_categorical(y_test, 10)
+
+3. **Define the VGSL spec string for the CNN**:
+
+   The VGSL spec string defines the layers of the CNN. Here's a simple CNN architecture:
+
+   .. code-block:: python
+
+      vgsl_spec = "None,32,32,3 Cr3,3,32 Mp2,2,2,2 Cr3,3,64 Mp2,2,2,2 Rc2 Fr128 D25 Fs10"
+
+   Explanation:
+
+   - `None,32,32,3`: Input layer for images of size 32x32 with 3 color channels (RGB).
+   - `Cr3,3,32`: Convolutional layer with a 3x3 kernel, ReLU activation, and 32 filters.
+   - `Mp2,2,2,2`: MaxPooling layer with a 2x2 pool size and 2x2 strides.
+   - `Cr3,3,64`: Second convolutional layer with 64 filters.
+   - `Rc2`: Reshape layer to flatten the output for the fully connected layer.
+   - `Fr128`: Fully connected (dense) layer with 128 units and ReLU activation.
+   - `D25`: Dropout layer with a 25% dropout rate.
+   - `Fs10`: Output layer with 10 units and softmax activation for classification into 10 classes.
+
+4. **Build and compile the model**:
+
+   Use VGSLify to build and compile the model. This step generates the CNN architecture based on the VGSL string and compiles it for training.
+
+   .. code-block:: python
+
+      vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+      model = vgsl_gn.generate_model(vgsl_spec)
+
+      model.compile(optimizer='adam',
+                    loss='categorical_crossentropy',
+                    metrics=['accuracy'])
+
+5. **Train the model**:
+
+   Now, train the CNN on the CIFAR-10 training set. You can adjust the batch size and number of epochs as needed.
+
+   .. code-block:: python
+
+      history = model.fit(x_train, y_train, epochs=10, batch_size=64, validation_data=(x_test, y_test))
+
+6. **Evaluate the model performance**:
+
+   After training, evaluate the model on the test set to see how well it performs.
+
+   .. code-block:: python
+
+      test_loss, test_acc = model.evaluate(x_test, y_test)
+      print(f'Test accuracy: {test_acc}')
+
+   You can also plot the training history to visualize how the accuracy and loss evolve over time.
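+   For example, a minimal plotting sketch (assuming the standard Keras `History` object returned by `model.fit` above, with its `accuracy`/`val_accuracy` keys):
+
+   .. code-block:: python
+
+      import matplotlib.pyplot as plt
+
+      # Accuracy on the training and validation sets after each epoch
+      plt.plot(history.history['accuracy'], label='train')
+      plt.plot(history.history['val_accuracy'], label='validation')
+      plt.xlabel('Epoch')
+      plt.ylabel('Accuracy')
+      plt.legend()
+      plt.show()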
+
+Tutorial 2: Creating an LSTM for Sequence Prediction
+----------------------------------------------------
+
+Overview
+~~~~~~~~
+
+In this tutorial, you will build an LSTM (Long Short-Term Memory) model using VGSLify to predict the next value in a sequence. This is commonly used in time-series forecasting. We will generate synthetic data, define an LSTM model using a VGSL string, and train the model to predict future values in the sequence.
+
+Step-by-Step Instructions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+1. **Import necessary libraries**:
+
+   .. code-block:: python
+
+      import numpy as np
+      from vgslify.generator import VGSLModelGenerator
+
+2. **Generate synthetic sequence data**:
+
+   For this example, let's generate a sine wave as our synthetic sequence data. The LSTM will learn to predict the next value in this sequence.
+
+   .. code-block:: python
+
+      def generate_sine_wave(seq_length=1000):
+          x = np.arange(seq_length)
+          y = np.sin(x / 20.0)
+          return y
+
+      sine_wave = generate_sine_wave()
+
+      # Prepare the data for LSTM input
+      def create_sequences(data, seq_length):
+          x = []
+          y = []
+          for i in range(len(data) - seq_length):
+              x.append(data[i:i+seq_length])
+              y.append(data[i+seq_length])
+          return np.array(x), np.array(y)
+
+      seq_length = 50
+      x_train, y_train = create_sequences(sine_wave, seq_length)
+
+      x_train = np.expand_dims(x_train, axis=-1)  # LSTM expects input shape (batch, time steps, features)
+      y_train = np.expand_dims(y_train, axis=-1)
+
+3. **Define the VGSL spec string for the LSTM model**:
+
+   Here's the VGSL string to define an LSTM with 50 units, followed by dropout and an output layer:
+
+   .. code-block:: python
+
+      vgsl_spec = f"None,{seq_length},{x_train.shape[2]} Lf50 D20 Fl1"
+
+   Explanation:
+
+   - `None,{seq_length},{x_train.shape[2]}`: Input shape with a sequence length of 50 and 1 feature per time step.
+   - `Lf50`: LSTM with 50 units, without returning sequences.
+   - `D20`: Dropout layer with a 20% dropout rate.
+   - `Fl1`: Output layer with 1 unit and linear activation for sequence prediction.
+
+4. **Build and compile the model**:
+
+   .. code-block:: python
+
+      vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+      model = vgsl_gn.generate_model(vgsl_spec)
+
+      model.compile(optimizer='adam',
+                    loss='mean_squared_error')
+
+5. **Train the model**:
+
+   Train the model to predict the next value in the sine wave sequence.
+
+   .. code-block:: python
+
+      history = model.fit(x_train, y_train, epochs=20, batch_size=64)
+
+6. **Evaluate the model**:
+
+   Once training is complete, evaluate the model by plotting the true vs. predicted values in the sine wave sequence.
+
+   .. code-block:: python
+
+      import matplotlib.pyplot as plt
+
+      y_pred = model.predict(x_train)
+
+      plt.plot(y_train, label='True')
+      plt.plot(y_pred, label='Predicted')
+      plt.legend()
+      plt.show()
+
diff --git a/_static/_sphinx_javascript_frameworks_compat.js b/_static/_sphinx_javascript_frameworks_compat.js
new file mode 100644
index 0000000..8141580
--- /dev/null
+++ b/_static/_sphinx_javascript_frameworks_compat.js
@@ -0,0 +1,123 @@
+/* Compatability shim for jQuery and underscores.js.
+ * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? 
rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/_static/basic.css b/_static/basic.css new file mode 100644 index 0000000..f316efc --- /dev/null +++ b/_static/basic.css @@ -0,0 +1,925 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + 
margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +a:visited { + color: #551A8B; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} + +nav.contents, +aside.topic, +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ + +nav.contents, +aside.topic, +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +nav.contents > :last-child, +aside.topic > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +nav.contents::after, +aside.topic::after, +div.topic::after, +div.admonition::after, 
+blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + 
margin-bottom: 0; +} + +aside.footnote > span, +div.citation > span { + float: left; +} +aside.footnote > span:last-of-type, +div.citation > span:last-of-type { + padding-right: 0.5em; +} +aside.footnote > p { + margin-left: 2em; +} +div.citation > p { + margin-left: 4em; +} +aside.footnote > p:last-of-type, +div.citation > p:last-of-type { + margin-bottom: 0em; +} +aside.footnote > p:last-of-type:after, +div.citation > p:last-of-type:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.sig dd { + margin-top: 0px; + margin-bottom: 0px; +} + +.sig dl { + margin-top: 0px; + margin-bottom: 0px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +.translated { + background-color: rgba(207, 255, 207, 0.2) +} + +.untranslated { + background-color: rgba(255, 207, 207, 0.2) +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* 
Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/_static/css/badge_only.css b/_static/css/badge_only.css new file mode 100644 index 0000000..c718cee --- /dev/null +++ b/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions 
.rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff b/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 0000000..6cb6000 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Bold.woff2 b/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 0000000..7059e23 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff b/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 0000000..f815f63 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/_static/css/fonts/Roboto-Slab-Regular.woff2 b/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 0000000..f2c76e5 Binary files /dev/null and b/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/_static/css/fonts/fontawesome-webfont.eot b/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 0000000..e9f60ca Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/_static/css/fonts/fontawesome-webfont.svg b/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 0000000..855c845 --- /dev/null +++ b/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. 
diff --git a/_static/css/fonts/fontawesome-webfont.ttf b/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 0000000..35acda2 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/_static/css/fonts/fontawesome-webfont.woff b/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 0000000..400014a Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/_static/css/fonts/fontawesome-webfont.woff2 b/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 0000000..4d13fc6 Binary files /dev/null and b/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/_static/css/fonts/lato-bold-italic.woff b/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 0000000..88ad05b Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff differ diff --git a/_static/css/fonts/lato-bold-italic.woff2 b/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 0000000..c4e3d80 Binary files /dev/null and b/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/_static/css/fonts/lato-bold.woff b/_static/css/fonts/lato-bold.woff new file mode 100644 index 0000000..c6dff51 Binary files /dev/null and b/_static/css/fonts/lato-bold.woff differ diff --git a/_static/css/fonts/lato-bold.woff2 b/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 0000000..bb19504 Binary files /dev/null and b/_static/css/fonts/lato-bold.woff2 differ diff --git a/_static/css/fonts/lato-normal-italic.woff b/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 0000000..76114bc Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff differ diff --git a/_static/css/fonts/lato-normal-italic.woff2 b/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 0000000..3404f37 Binary files /dev/null and b/_static/css/fonts/lato-normal-italic.woff2 differ diff --git a/_static/css/fonts/lato-normal.woff b/_static/css/fonts/lato-normal.woff new file mode 100644 index 0000000..ae1307f Binary files /dev/null and 
b/_static/css/fonts/lato-normal.woff differ diff --git a/_static/css/fonts/lato-normal.woff2 b/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 0000000..3bf9843 Binary files /dev/null and b/_static/css/fonts/lato-normal.woff2 differ diff --git a/_static/css/theme.css b/_static/css/theme.css new file mode 100644 index 0000000..19a446a --- /dev/null +++ b/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content 
.attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt 
.pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
.caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-elli
psis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li 
button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.f
a-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-trip
advisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:b
efore,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a 
.admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content 
table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content 
.fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s 
ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content 
.wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content 
.wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content 
.wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen 
and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group 
.wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 0000000..4d67807 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. 
+ */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely be bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git
a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 0000000..7eabc9d --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,13 @@ +const DOCUMENTATION_OPTIONS = { + VERSION: '0.13.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/fonts/Lato/lato-bold.eot b/_static/fonts/Lato/lato-bold.eot new file mode 100644 index 0000000..3361183 Binary files /dev/null and b/_static/fonts/Lato/lato-bold.eot differ diff --git a/_static/fonts/Lato/lato-bold.ttf b/_static/fonts/Lato/lato-bold.ttf new file mode 100644 index 0000000..29f691d Binary files /dev/null and b/_static/fonts/Lato/lato-bold.ttf differ diff --git a/_static/fonts/Lato/lato-bold.woff b/_static/fonts/Lato/lato-bold.woff new file mode 100644 index 0000000..c6dff51 Binary files /dev/null and b/_static/fonts/Lato/lato-bold.woff differ diff --git a/_static/fonts/Lato/lato-bold.woff2 b/_static/fonts/Lato/lato-bold.woff2 new file mode 100644 index 0000000..bb19504 Binary files /dev/null and b/_static/fonts/Lato/lato-bold.woff2 differ diff --git a/_static/fonts/Lato/lato-bolditalic.eot b/_static/fonts/Lato/lato-bolditalic.eot new file mode 100644 index 0000000..3d41549 Binary files /dev/null and b/_static/fonts/Lato/lato-bolditalic.eot differ diff --git a/_static/fonts/Lato/lato-bolditalic.ttf b/_static/fonts/Lato/lato-bolditalic.ttf new file mode 100644 index 0000000..f402040 Binary files /dev/null and b/_static/fonts/Lato/lato-bolditalic.ttf differ diff --git a/_static/fonts/Lato/lato-bolditalic.woff b/_static/fonts/Lato/lato-bolditalic.woff new file mode 100644 index 0000000..88ad05b Binary files /dev/null and b/_static/fonts/Lato/lato-bolditalic.woff differ diff --git a/_static/fonts/Lato/lato-bolditalic.woff2 b/_static/fonts/Lato/lato-bolditalic.woff2 new file mode 100644 index 0000000..c4e3d80 Binary files /dev/null and b/_static/fonts/Lato/lato-bolditalic.woff2 differ diff --git a/_static/fonts/Lato/lato-italic.eot b/_static/fonts/Lato/lato-italic.eot new file mode 100644 index 0000000..3f82642 Binary files /dev/null and b/_static/fonts/Lato/lato-italic.eot differ diff --git a/_static/fonts/Lato/lato-italic.ttf b/_static/fonts/Lato/lato-italic.ttf new file mode 100644 index 0000000..b4bfc9b Binary files /dev/null and b/_static/fonts/Lato/lato-italic.ttf differ diff --git a/_static/fonts/Lato/lato-italic.woff b/_static/fonts/Lato/lato-italic.woff new file mode 100644 index 0000000..76114bc Binary files /dev/null and b/_static/fonts/Lato/lato-italic.woff differ diff --git a/_static/fonts/Lato/lato-italic.woff2 b/_static/fonts/Lato/lato-italic.woff2 new file mode 100644 index 0000000..3404f37 Binary files /dev/null and b/_static/fonts/Lato/lato-italic.woff2 differ diff --git a/_static/fonts/Lato/lato-regular.eot b/_static/fonts/Lato/lato-regular.eot new file mode 100644 index 0000000..11e3f2a Binary files /dev/null and b/_static/fonts/Lato/lato-regular.eot differ diff --git a/_static/fonts/Lato/lato-regular.ttf b/_static/fonts/Lato/lato-regular.ttf new file mode 100644 index 0000000..74decd9 Binary files /dev/null and b/_static/fonts/Lato/lato-regular.ttf differ diff --git
a/_static/fonts/Lato/lato-regular.woff b/_static/fonts/Lato/lato-regular.woff new file mode 100644 index 0000000..ae1307f Binary files /dev/null and b/_static/fonts/Lato/lato-regular.woff differ diff --git a/_static/fonts/Lato/lato-regular.woff2 b/_static/fonts/Lato/lato-regular.woff2 new file mode 100644 index 0000000..3bf9843 Binary files /dev/null and b/_static/fonts/Lato/lato-regular.woff2 differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot new file mode 100644 index 0000000..79dc8ef Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf new file mode 100644 index 0000000..df5d1df Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff new file mode 100644 index 0000000..6cb6000 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 new file mode 100644 index 0000000..7059e23 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot new file mode 100644 index 0000000..2f7ca78 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf new file mode 100644 index 0000000..eb52a79 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff new file mode 100644 index 0000000..f815f63 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff differ diff --git a/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 new file mode 100644 index 0000000..f2c76e5 Binary files /dev/null and b/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 differ diff --git a/_static/jquery.js b/_static/jquery.js new file mode 100644 index 0000000..c4c6022 --- /dev/null +++ b/_static/jquery.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return 
H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var 
t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof 
e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
<form></form><form></form>
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof 
a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/html5shiv.min.js b/_static/js/html5shiv.min.js new file mode 100644 index 0000000..cd1c674 --- /dev/null +++ b/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/_static/js/theme.js b/_static/js/theme.js new file mode 100644 index 0000000..1fddb6e --- /dev/null +++ b/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t +
+if (themeFlyoutDisplay === "attached") {
+  function renderLanguages(config) {
+    if (!config.projects.translations.length) {
+      return "";
+    }
+
+    const languagesHTML = `
+      <dl>
+        <dt>Languages</dt>
+        ${config.projects.translations
+          .map(
+            (translation) => `
+        <dd ${translation.slug == config.projects.current.slug ? 'class="rtd-current-item"' : ""}>
+          <a href="${translation.urls.documentation}">${translation.language.code}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return languagesHTML;
+  }
+
+  function renderVersions(config) {
+    if (!config.versions.active.length) {
+      return "";
+    }
+    const versionsHTML = `
+      <dl>
+        <dt>Versions</dt>
+        ${config.versions.active
+          .map(
+            (version) => `
+        <dd ${version.slug === config.versions.current.slug ? 'class="rtd-current-item"' : ""}>
+          <a href="${version.urls.documentation}">${version.slug}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return versionsHTML;
+  }
+
+  function renderDownloads(config) {
+    if (!Object.keys(config.versions.current.downloads).length) {
+      return "";
+    }
+    const downloadsNameDisplay = {
+      pdf: "PDF",
+      epub: "Epub",
+      htmlzip: "HTML",
+    };
+
+    const downloadsHTML = `
+      <dl>
+        <dt>Downloads</dt>
+        ${Object.entries(config.versions.current.downloads)
+          .map(
+            ([name, url]) => `
+        <dd>
+          <a href="${url}">${downloadsNameDisplay[name]}</a>
+        </dd>
+        `,
+          )
+          .join("\n")}
+      </dl>
+    `;
+    return downloadsHTML;
+  }
+
+  document.addEventListener("readthedocs-addons-data-ready", function (event) {
+    const config = event.detail.data();
+
+    const flyout = `
+      <div class="rst-versions" data-toggle="rst-versions" role="note">
+        <span class="rst-current-version" data-toggle="rst-current-version">
+          <span class="fa fa-book"> Read the Docs</span>
+          v: ${config.versions.current.slug}
+          <span class="fa fa-caret-down"></span>
+        </span>
+        <div class="rst-other-versions">
+          <div class="injected">
+            ${renderLanguages(config)}
+            ${renderVersions(config)}
+            ${renderDownloads(config)}
+            <dl>
+              <dt>On Read the Docs</dt>
+              <dd>
+                <a href="${config.projects.current.urls.home}">Project Home</a>
+              </dd>
+              <dd>
+                <a href="${config.projects.current.urls.builds}">Builds</a>
+              </dd>
+              <dd>
+                <a href="${config.projects.current.urls.downloads}">Downloads</a>
+              </dd>
+            </dl>
+            <dl>
+              <dt>Search</dt>
+              <dd>
+                <form id="flyout-search-form">
+                  <input class="wy-form" type="text" name="q" aria-label="Search docs" placeholder="Search docs" />
+                </form>
+              </dd>
+            </dl>
+            <hr />
+            <small>
+              <span>Hosted by <a href="https://about.readthedocs.org/">Read the Docs</a></span>
+            </small>
+          </div>
+        </div>
+      </div>
+    `;
+
+    // Inject the generated flyout into the body HTML element.
+    document.body.insertAdjacentHTML("beforeend", flyout);
+
+    // Trigger the Read the Docs Addons Search modal when clicking on the "Search docs" input from inside the flyout.
+    document
+      .querySelector("#flyout-search-form")
+      .addEventListener("focusin", () => {
+        const event = new CustomEvent("readthedocs-search-show");
+        document.dispatchEvent(event);
+      });
+  })
+}
+
+if (themeLanguageSelector || themeVersionSelector) {
+  function onSelectorSwitch(event) {
+    const option = event.target.selectedIndex;
+    const item = event.target.options[option];
+    window.location.href = item.dataset.url;
+  }
+
+  document.addEventListener("readthedocs-addons-data-ready", function (event) {
+    const config = event.detail.data();
+
+    const versionSwitch = document.querySelector(
+      "div.switch-menus > div.version-switch",
+    );
+    if (themeVersionSelector) {
+      let versions = config.versions.active;
+      if (config.versions.current.hidden || config.versions.current.type === "external") {
+        versions.unshift(config.versions.current);
+      }
+      const versionSelect = `
+        <select>
+          ${versions
+            .map(
+              (version) => `
+          <option
+            value="${version.slug}"
+            ${config.versions.current.slug === version.slug ? 'selected="selected"' : ""}
+            data-url="${version.urls.documentation}">
+            ${version.slug}
+          </option>
+          `,
+            )
+            .join("\n")}
+        </select>
+      `;
+
+      versionSwitch.innerHTML = versionSelect;
+      versionSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+    }
+
+    const languageSwitch = document.querySelector(
+      "div.switch-menus > div.language-switch",
+    );
+
+    if (themeLanguageSelector) {
+      if (config.projects.translations.length) {
+        // Add the current language to the options on the selector
+        let languages = config.projects.translations.concat(
+          config.projects.current,
+        );
+        languages = languages.sort((a, b) =>
+          a.language.name.localeCompare(b.language.name),
+        );
+
+        const languageSelect = `
+          <select>
+            ${languages
+              .map(
+                (language) => `
+            <option
+              value="${language.slug}"
+              ${config.projects.current.slug === language.slug ? 'selected="selected"' : ""}
+              data-url="${language.urls.documentation}">
+              ${language.language.code}
+            </option>
+            `,
+              )
+              .join("\n")}
+          </select>
+        `;
+
+        languageSwitch.innerHTML = languageSelect;
+        languageSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
+      }
+      else {
+        languageSwitch.remove();
+      }
+    }
+  });
+}
+
+document.addEventListener("readthedocs-addons-data-ready", function (event) {
+  // Trigger the Read the Docs Addons Search modal when clicking on "Search docs" input from the topnav.
+  document
+    .querySelector("[role='search'] input")
+    .addEventListener("focusin", () => {
+      const event = new CustomEvent("readthedocs-search-show");
+      document.dispatchEvent(event);
+    });
+});
\ No newline at end of file
diff --git a/_static/language_data.js b/_static/language_data.js
new file mode 100644
index 0000000..367b8ed
--- /dev/null
+++ b/_static/language_data.js
@@ -0,0 +1,199 @@
+/*
+ * language_data.js
+ * ~~~~~~~~~~~~~~~~
+ *
+ * This script contains the language-specific data used by searchtools.js,
+ * namely the list of stopwords, stemmer, scorer and splitter.
+ *
+ * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
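+ *
+ * For example, searchtools.js first drops query terms found in `stopwords`
+ * below, then reduces the survivors with the Porter stemmer defined in this
+ * file, e.g. new Stemmer().stemWord("running") -> "run", so lookups in the
+ * search index are made against stemmed forms rather than raw query words.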
+ * + */ + +var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; + + +/* Non-minified version is copied as a separate JS file, if available */ + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = 
new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + diff --git a/_static/minus.png b/_static/minus.png new file mode 100644 index 0000000..d96755f Binary files /dev/null and b/_static/minus.png differ diff --git a/_static/plus.png b/_static/plus.png new file mode 100644 index 0000000..7107cec Binary files /dev/null and b/_static/plus.png differ diff --git a/_static/pygments.css b/_static/pygments.css new file mode 100644 index 0000000..84ab303 --- /dev/null +++ b/_static/pygments.css @@ -0,0 +1,75 @@ +pre { line-height: 125%; } +td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; } +td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } +.highlight .hll { background-color: #ffffcc } +.highlight { background: #f8f8f8; } +.highlight .c { color: #3D7B7B; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #008000; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #9C6500 } /* Comment.Preproc */ +.highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #E40000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #008400 } /* Generic.Inserted */ +.highlight .go { color: #717171 } /* Generic.Output */ +.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #008000 } /* Keyword.Pseudo */ +.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #B00040 } /* Keyword.Type */ +.highlight .m { color: #666666 } /* Literal.Number */ +.highlight .s { color: #BA2121 } /* Literal.String */ +.highlight .na { color: #687822 } /* Name.Attribute */ +.highlight .nb { color: #008000 } /* Name.Builtin */ +.highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ +.highlight .no { color: #880000 } /* Name.Constant */ 
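+/* (illustration) Pygments wraps each token in a span carrying one of the
+   classes in this sheet, e.g. a Python `def` is emitted as
+   <span class="k">def</span> and renders with the bold green Keyword rule. */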
+.highlight .nd { color: #AA22FF } /* Name.Decorator */ +.highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */ +.highlight .nf { color: #0000FF } /* Name.Function */ +.highlight .nl { color: #767600 } /* Name.Label */ +.highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #19177C } /* Name.Variable */ +.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #666666 } /* Literal.Number.Bin */ +.highlight .mf { color: #666666 } /* Literal.Number.Float */ +.highlight .mh { color: #666666 } /* Literal.Number.Hex */ +.highlight .mi { color: #666666 } /* Literal.Number.Integer */ +.highlight .mo { color: #666666 } /* Literal.Number.Oct */ +.highlight .sa { color: #BA2121 } /* Literal.String.Affix */ +.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ +.highlight .sc { color: #BA2121 } /* Literal.String.Char */ +.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ +.highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #BA2121 } /* Literal.String.Double */ +.highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ +.highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */ +.highlight .sx { color: #008000 } /* Literal.String.Other */ +.highlight .sr { color: #A45A77 } /* Literal.String.Regex */ +.highlight .s1 { color: #BA2121 } /* Literal.String.Single */ +.highlight .ss { color: #19177C } /* Literal.String.Symbol */ +.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #0000FF } /* Name.Function.Magic */ +.highlight .vc { color: #19177C } /* Name.Variable.Class */ +.highlight .vg { color: #19177C } /* Name.Variable.Global */ +.highlight .vi { color: #19177C } /* Name.Variable.Instance */ +.highlight .vm { color: #19177C } /* Name.Variable.Magic */ +.highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_static/searchtools.js b/_static/searchtools.js new file mode 100644 index 0000000..b08d58c --- /dev/null +++ b/_static/searchtools.js @@ -0,0 +1,620 @@ +/* + * searchtools.js + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for the full-text search. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +/** + * Simple result scoring code. + */ +if (typeof Scorer === "undefined") { + var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [docname, title, anchor, descr, score, filename] + // and returns the new score. + /* + score: result => { + const [docname, title, anchor, descr, score, filename] = result + return score + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: { + 0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5, // used to be unimportantResults + }, + // Used when the priority is not in the mapping. 
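+    // (For instance, a priority-0 "important" entry gains +15 on top of its
+    // objNameMatch/objPartialMatch score, a priority-2 "unimportant" one
+    // loses 5, and any priority missing from objPrio falls back to
+    // objPrioDefault below.)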
+ objPrioDefault: 0, + + // query found in title + title: 15, + partialTitle: 7, + // query found in terms + term: 5, + partialTerm: 2, + }; +} + +const _removeChildren = (element) => { + while (element && element.lastChild) element.removeChild(element.lastChild); +}; + +/** + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping + */ +const _escapeRegExp = (string) => + string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string + +const _displayItem = (item, searchTerms, highlightTerms) => { + const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; + const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; + const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; + const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; + + const [docName, title, anchor, descr, score, _filename] = item; + + let listItem = document.createElement("li"); + let requestUrl; + let linkUrl; + if (docBuilder === "dirhtml") { + // dirhtml builder + let dirname = docName + "/"; + if (dirname.match(/\/index\/$/)) + dirname = dirname.substring(0, dirname.length - 6); + else if (dirname === "index/") dirname = ""; + requestUrl = contentRoot + dirname; + linkUrl = requestUrl; + } else { + // normal html builders + requestUrl = contentRoot + docName + docFileSuffix; + linkUrl = docName + docLinkSuffix; + } + let linkEl = listItem.appendChild(document.createElement("a")); + linkEl.href = linkUrl + anchor; + linkEl.dataset.score = score; + linkEl.innerHTML = title; + if (descr) { + listItem.appendChild(document.createElement("span")).innerHTML = + " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } + else if (showSearchSummary) + fetch(requestUrl) + .then((responseData) => responseData.text()) + .then((data) => { + if (data) + listItem.appendChild( + Search.makeSearchSummary(data, searchTerms, anchor) + ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + }); + Search.output.appendChild(listItem); +}; +const _finishSearch = (resultCount) => { + Search.stopPulse(); + Search.title.innerText = _("Search Results"); + if (!resultCount) + Search.status.innerText = Documentation.gettext( + "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." + ); + else + Search.status.innerText = _( + "Search finished, found ${resultCount} page(s) matching the search query." + ).replace('${resultCount}', resultCount); +}; +const _displayNextItem = ( + results, + resultCount, + searchTerms, + highlightTerms, +) => { + // results left, load the summary and display it + // this is intended to be dynamic (don't sub resultsCount) + if (results.length) { + _displayItem(results.pop(), searchTerms, highlightTerms); + setTimeout( + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), + 5 + ); + } + // search finished, update title and status message + else _finishSearch(resultCount); +}; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename]. 
+// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; + +/** + * Default splitQuery function. Can be overridden in ``sphinx.search`` with a + * custom function per language. + * + * The regular expression works by splitting the string on consecutive characters + * that are not Unicode letters, numbers, underscores, or emoji characters. + * This is the same as ``\W+`` in Python, preserving the surrogate pair area. + */ +if (typeof splitQuery === "undefined") { + var splitQuery = (query) => query + .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu) + .filter(term => term) // remove remaining empty strings +} + +/** + * Search Module + */ +const Search = { + _index: null, + _queued_query: null, + _pulse_status: -1, + + htmlToText: (htmlString, anchor) => { + const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content + const docContent = htmlElement.querySelector('[role="main"]'); + if (docContent) return docContent.textContent; + + console.warn( + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." 
+ ); + return ""; + }, + + init: () => { + const query = new URLSearchParams(window.location.search).get("q"); + document + .querySelectorAll('input[name="q"]') + .forEach((el) => (el.value = query)); + if (query) Search.performSearch(query); + }, + + loadIndex: (url) => + (document.body.appendChild(document.createElement("script")).src = url), + + setIndex: (index) => { + Search._index = index; + if (Search._queued_query !== null) { + const query = Search._queued_query; + Search._queued_query = null; + Search.query(query); + } + }, + + hasIndex: () => Search._index !== null, + + deferQuery: (query) => (Search._queued_query = query), + + stopPulse: () => (Search._pulse_status = -1), + + startPulse: () => { + if (Search._pulse_status >= 0) return; + + const pulse = () => { + Search._pulse_status = (Search._pulse_status + 1) % 4; + Search.dots.innerText = ".".repeat(Search._pulse_status); + if (Search._pulse_status >= 0) window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch: (query) => { + // create the required interface elements + const searchText = document.createElement("h2"); + searchText.textContent = _("Searching"); + const searchSummary = document.createElement("p"); + searchSummary.classList.add("search-summary"); + searchSummary.innerText = ""; + const searchList = document.createElement("ul"); + searchList.classList.add("search"); + + const out = document.getElementById("search-results"); + Search.title = out.appendChild(searchText); + Search.dots = Search.title.appendChild(document.createElement("span")); + Search.status = out.appendChild(searchSummary); + Search.output = out.appendChild(searchList); + + const searchProgress = document.getElementById("search-progress"); + // Some themes don't use the search progress node + if (searchProgress) { + searchProgress.innerText = _("Preparing search..."); + } + Search.startPulse(); + + // index already loaded, the browser was quick! 
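+    // (Otherwise the index is still loading: loadIndex() injected a <script>
+    // tag for searchindex.js, which calls Search.setIndex() when it executes;
+    // deferQuery() parks the query so setIndex() can replay it at that point.)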
+ if (Search.hasIndex()) Search.query(query); + else Search.deferQuery(query); + }, + + _parseQuery: (query) => { + // stem the search terms and add them to the correct list + const stemmer = new Stemmer(); + const searchTerms = new Set(); + const excludedTerms = new Set(); + const highlightTerms = new Set(); + const objectTerms = new Set(splitQuery(query.toLowerCase().trim())); + splitQuery(query.trim()).forEach((queryTerm) => { + const queryTermLower = queryTerm.toLowerCase(); + + // maybe skip this "word" + // stopwords array is from language_data.js + if ( + stopwords.indexOf(queryTermLower) !== -1 || + queryTerm.match(/^\d+$/) + ) + return; + + // stem the word + let word = stemmer.stemWord(queryTermLower); + // select the correct list + if (word[0] === "-") excludedTerms.add(word.substr(1)); + else { + searchTerms.add(word); + highlightTerms.add(queryTermLower); + } + }); + + if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js + localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" ")) + } + + // console.debug("SEARCH: searching for:"); + // console.info("required: ", [...searchTerms]); + // console.info("excluded: ", [...excludedTerms]); + + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename]. + const normalResults = []; + const nonMainIndexResults = []; + + _removeChildren(document.getElementById("search-progress")); + + const queryLower = query.toLowerCase().trim(); + for (const [title, foundTitles] of Object.entries(allTitles)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { + for (const [file, id] of foundTitles) { + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ + docNames[file], + titles[file] !== title ? `${titles[file]} > ${title}` : title, + id !== null ? "#" + id : "", + null, + score + boost, + filenames[file], + ]); + } + } + } + + // search for explicit entries in index directives + for (const [entry, foundEntries] of Object.entries(indexEntries)) { + if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ + docNames[file], + titles[file], + id ? 
"#" + id : "", + null, + score, + filenames[file], + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } + } + } + } + + // lookup as object + objectTerms.forEach((term) => + normalResults.push(...Search.performObjectSearch(term, objectTerms)) + ); + + // lookup as search terms in fulltext + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + + // let the scorer override scores with a custom scoring function + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. + normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. + let results = [...nonMainIndexResults, ...normalResults]; + + // remove duplicate search results + // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept + let seen = new Set(); + results = results.reverse().reduce((acc, result) => { + let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(','); + if (!seen.has(resultStr)) { + acc.push(result); + seen.add(resultStr); + } + return acc; + }, []); + + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); + + // for debugging + //Search.lastresults = results.slice(); // a copy + // console.info("search results:", Search.lastresults); + + // print the results + _displayNextItem(results, results.length, searchTerms, highlightTerms); + }, + + /** + * search for object names + */ + performObjectSearch: (object, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const objects = Search._index.objects; + const objNames = Search._index.objnames; + const titles = Search._index.titles; + + const results = []; + + const objectSearchCallback = (prefix, match) => { + const name = match[4] + const fullname = (prefix ? prefix + "." : "") + name; + const fullnameLower = fullname.toLowerCase(); + if (fullnameLower.indexOf(object) < 0) return; + + let score = 0; + const parts = fullnameLower.split("."); + + // check for different match types: exact matches of full name or + // "last name" (i.e. 
last dotted part) + if (fullnameLower === object || parts.slice(-1)[0] === object) + score += Scorer.objNameMatch; + else if (parts.slice(-1)[0].indexOf(object) > -1) + score += Scorer.objPartialMatch; // matches in last name + + const objName = objNames[match[1]][2]; + const title = titles[match[0]]; + + // If more than one term searched for, we require other words to be + // found in the name/title/description + const otherTerms = new Set(objectTerms); + otherTerms.delete(object); + if (otherTerms.size > 0) { + const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase(); + if ( + [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0) + ) + return; + } + + let anchor = match[3]; + if (anchor === "") anchor = fullname; + else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname; + + const descr = objName + _(", in ") + title; + + // add custom score for some objects according to scorer + if (Scorer.objPrio.hasOwnProperty(match[2])) + score += Scorer.objPrio[match[2]]; + else score += Scorer.objPrioDefault; + + results.push([ + docNames[match[0]], + fullname, + "#" + anchor, + descr, + score, + filenames[match[0]], + ]); + }; + Object.keys(objects).forEach((prefix) => + objects[prefix].forEach((array) => + objectSearchCallback(prefix, array) + ) + ); + return results; + }, + + /** + * search for full-text terms in the index + */ + performTermsSearch: (searchTerms, excludedTerms) => { + // prepare search + const terms = Search._index.terms; + const titleTerms = Search._index.titleterms; + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + + const scoreMap = new Map(); + const fileMap = new Map(); + + // perform the search on the required terms + searchTerms.forEach((word) => { + const files = []; + const arr = [ + { files: terms[word], score: Scorer.term }, + { files: titleTerms[word], score: Scorer.title }, + ]; + // add support for partial matches + if (word.length > 2) { + const escapedWord = _escapeRegExp(word); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } + } + + // no match but word was a required one + if (arr.every((record) => record.files === undefined)) return; + + // found search word in contents + arr.forEach((record) => { + if (record.files === undefined) return; + + let recordFiles = record.files; + if (recordFiles.length === undefined) recordFiles = [recordFiles]; + files.push(...recordFiles); + + // set score for the word in each file + recordFiles.forEach((file) => { + if (!scoreMap.has(file)) scoreMap.set(file, {}); + scoreMap.get(file)[word] = record.score; + }); + }); + + // create the mapping + files.forEach((file) => { + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); + }); + }); + + // now check if the files don't contain excluded terms + const results = []; + for (const [file, wordList] of fileMap) { + // check if all requirements are matched + + // as search terms with length < 3 are discarded + const filteredTermCount = [...searchTerms].filter( + (term) => term.length > 2 + ).length; + if ( + wordList.length !== searchTerms.size && + 
wordList.length !== filteredTermCount + ) + continue; + + // ensure that none of the excluded terms is in the search result + if ( + [...excludedTerms].some( + (term) => + terms[term] === file || + titleTerms[term] === file || + (terms[term] || []).includes(file) || + (titleTerms[term] || []).includes(file) + ) + ) + break; + + // select one (max) score for the file. + const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + // add result to the result list + results.push([ + docNames[file], + titles[file], + "", + null, + score, + filenames[file], + ]); + } + return results; + }, + + /** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words. + */ + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); + if (text === "") return null; + + const textLower = text.toLowerCase(); + const actualStartPosition = [...keywords] + .map((k) => textLower.indexOf(k.toLowerCase())) + .filter((i) => i > -1) + .slice(-1)[0]; + const startWithContext = Math.max(actualStartPosition - 120, 0); + + const top = startWithContext === 0 ? "" : "..."; + const tail = startWithContext + 240 < text.length ? "..." : ""; + + let summary = document.createElement("p"); + summary.classList.add("context"); + summary.textContent = top + text.substr(startWithContext, 240).trim() + tail; + + return summary; + }, +}; + +_ready(Search.init); diff --git a/_static/sphinx_highlight.js b/_static/sphinx_highlight.js new file mode 100644 index 0000000..8a96c69 --- /dev/null +++ b/_static/sphinx_highlight.js @@ -0,0 +1,154 @@ +/* Highlighting utilities for Sphinx HTML documentation. */ +"use strict"; + +const SPHINX_HIGHLIGHT_ENABLED = true + +/** + * highlight a given string on a node by wrapping it in + * span elements with the given class name. + */ +const _highlight = (node, addItems, text, className) => { + if (node.nodeType === Node.TEXT_NODE) { + const val = node.nodeValue; + const parent = node.parentNode; + const pos = val.toLowerCase().indexOf(text); + if ( + pos >= 0 && + !parent.classList.contains(className) && + !parent.classList.contains("nohighlight") + ) { + let span; + + const closestNode = parent.closest("body, svg, foreignObject"); + const isInSVG = closestNode && closestNode.matches("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.classList.add(className); + } + + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); + parent.insertBefore( + span, + parent.insertBefore( + rest, + node.nextSibling + ) + ); + node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. 
+ */ + _highlight(rest, addItems, text, className); + + if (isInSVG) { + const rect = document.createElementNS( + "http://www.w3.org/2000/svg", + "rect" + ); + const bbox = parent.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute("class", className); + addItems.push({ parent: parent, target: rect }); + } + } + } else if (node.matches && !node.matches("button, select, textarea")) { + node.childNodes.forEach((el) => _highlight(el, addItems, text, className)); + } +}; +const _highlightText = (thisNode, text, className) => { + let addItems = []; + _highlight(thisNode, addItems, text, className); + addItems.forEach((obj) => + obj.parent.insertAdjacentElement("beforebegin", obj.target) + ); +}; + +/** + * Small JavaScript module for the documentation. + */ +const SphinxHighlight = { + + /** + * highlight the search words provided in localstorage in the text + */ + highlightSearchWords: () => { + if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight + + // get and clear terms from localstorage + const url = new URL(window.location); + const highlight = + localStorage.getItem("sphinx_highlight_terms") + || url.searchParams.get("highlight") + || ""; + localStorage.removeItem("sphinx_highlight_terms") + url.searchParams.delete("highlight"); + window.history.replaceState({}, "", url); + + // get individual terms from highlight string + const terms = highlight.toLowerCase().split(/\s+/).filter(x => x); + if (terms.length === 0) return; // nothing to do + + // There should never be more than one element matching "div.body" + const divBody = document.querySelectorAll("div.body"); + const body = divBody.length ? divBody[0] : document.querySelector("body"); + window.setTimeout(() => { + terms.forEach((term) => _highlightText(body, term, "highlighted")); + }, 10); + + const searchBox = document.getElementById("searchbox"); + if (searchBox === null) return; + searchBox.appendChild( + document + .createRange() + .createContextualFragment( + '" + ) + ); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords: () => { + document + .querySelectorAll("#searchbox .highlight-link") + .forEach((el) => el.remove()); + document + .querySelectorAll("span.highlighted") + .forEach((el) => el.classList.remove("highlighted")); + localStorage.removeItem("sphinx_highlight_terms") + }, + + initEscapeListener: () => { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return; + if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) { + SphinxHighlight.hideSearchWords(); + event.preventDefault(); + } + }); + }, +}; + +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. 
+ */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/convert_to_vgsl_spec.html b/convert_to_vgsl_spec.html new file mode 100644 index 0000000..d4e6991 --- /dev/null +++ b/convert_to_vgsl_spec.html @@ -0,0 +1,194 @@ + + + + + + + + + Converting Models Back to VGSL Spec — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Converting Models Back to VGSL Spec

+

VGSLify now includes the ability to convert a trained or existing model back into a VGSL specification string. This functionality is useful for:

+
    +
  • Sharing model architectures in a concise format.

  • +
  • Reproducing models from the VGSL spec string.

  • +
  • Analyzing and understanding complex models via their VGSL representation.

  • +
+
+

How It Works

+

After you build or load a model using TensorFlow (with PyTorch support planned), you can convert it back into its VGSL specification string using the model_to_spec() function provided by VGSLify.

+
+

Example: Convert a Model to VGSL Spec

+

Here’s how you can convert an existing model to its VGSL spec:

+
from vgslify.utils import model_to_spec
+from tensorflow.keras.models import load_model
+
+# Load an existing TensorFlow model (previously saved)
+model = load_model("path_to_your_model.keras")
+
+# Convert the model to VGSL spec
+vgsl_spec = model_to_spec(model)
+print(vgsl_spec)
+
+
+

The above example will output the VGSL spec string corresponding to the architecture of the loaded model.
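
For instance, a model with the architecture used in the Getting Started guide would print a spec along these lines (illustrative output; the exact string depends on the model you load):

None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10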

+
+
+

Saving and Reusing VGSL Spec

+

Once you’ve converted the model to a VGSL spec, you can easily save or share the spec string. This can be reused to rebuild the same model using VGSLify.

+
    +
  1. Save the VGSL Spec:

    +
      +
    • Save the generated VGSL spec string to a file or store it in your project for later use.

    • +
    +
+
with open("model_spec.txt", "w") as f:
+    f.write(vgsl_spec)
+
+
+
    +
  2. Rebuild the Model from the Spec:

    +
      +
    • You can use the saved VGSL spec to rebuild the exact same model at any time.

    • +
    +
+
from vgslify.generator import VGSLModelGenerator
+
+# Load the VGSL spec from file
+with open("model_spec.txt", "r") as f:
+    vgsl_spec = f.read()
+
+# Rebuild the model from the spec
+vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+model = vgsl_gn.generate_model(vgsl_spec)
+model.summary()
+
+
+

By using this functionality, you can quickly share, reproduce, and analyze deep learning models in a concise format.
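
As a quick sanity check of the round trip, you can convert the rebuilt model back to a spec and compare it with the original. This is a minimal sketch, assuming the snippets above have already run and model_to_spec is imported; the two strings should normally match, although the exact formatting depends on the model:

# Convert the rebuilt model back into a spec string
rebuilt_spec = model_to_spec(model)

# Compare against the spec the model was rebuilt from
print("Original spec:", vgsl_spec)
print("Rebuilt spec: ", rebuilt_spec)
print("Match:", rebuilt_spec == vgsl_spec)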

+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/genindex.html b/genindex.html new file mode 100644 index 0000000..de84387 --- /dev/null +++ b/genindex.html @@ -0,0 +1,701 @@ + + + + + + + + Index — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Index

+ +
+ _ + | A + | B + | C + | D + | F + | G + | H + | I + | K + | L + | M + | P + | R + | S + | T + | U + | V + | W + +
+

_

+ + +
+ +

A

+ + + +
+ +

B

+ + + +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

H

+ + +
+ +

I

+ + + +
+ +

K

+ + +
+ +

L

+ + + +
+ +

M

+ + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + +
+ +

V

+ + + +
    +
  • + vgslify + +
  • +
  • + vgslify.core + +
  • +
  • + vgslify.core.config + +
  • +
  • + vgslify.core.factory + +
  • +
  • + vgslify.core.parser + +
  • +
  • + vgslify.core.utils + +
  • +
  • + vgslify.generator + +
  • +
  • + vgslify.parsers + +
  • +
  • + vgslify.parsers.base_parser + +
  • +
+ +

W

+ + +
+ + + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/getting_started.html b/getting_started.html new file mode 100644 index 0000000..328ea40 --- /dev/null +++ b/getting_started.html @@ -0,0 +1,210 @@ + + + + + + + + + Getting Started — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Getting Started

+
+

Overview

+

VGSLify makes it incredibly simple to define, build, and train deep learning models using the Variable-size Graph Specification Language (VGSL). VGSL strings serve as compact representations of neural network architectures, allowing you to build models in a single line.

+

VGSLify abstracts the complexity of backend-specific syntax, enabling seamless switching between TensorFlow and, in future releases, PyTorch. This flexibility allows you to focus on model architecture and training without worrying about framework-specific implementations.

+
+

What is a VGSL Specification?

+

A VGSL specification string concisely defines a neural network’s architecture. The string encodes all layers, including input, convolutional layers, pooling, fully connected layers, and more. Each part of the string corresponds to a different component of the model.

+

For example, the following VGSL string defines a simple convolutional neural network:

+

None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10

+

This string represents a model with an input layer, a convolutional layer, a max pooling layer, a reshape layer, a dense (fully connected) layer, dropout, and an output layer. The model’s structure is encoded entirely within this single line.

+

Key functionality of VGSLify includes:

+
    +
  • Building models with a single line: You can define complex architectures with a VGSL string, reducing the need for verbose code.

  • +
  • Switching between TensorFlow and PyTorch: VGSLify supports both TensorFlow and (planned) PyTorch, allowing you to easily switch between backends.

  • +
+
+
+
+

Simple Example: Building a Model

+

Let’s walk through building a simple deep learning model using VGSLify.

+
    +
  1. Import the VGSLModelGenerator:

    +

    The VGSLModelGenerator class is the core component for building models from VGSL strings. Begin by importing it:

    +
    from vgslify.generator import VGSLModelGenerator
    +
    +
    +
  2. Define the VGSL Specification String:

    +

    The VGSL spec string encodes the structure of the model. In this example, we will define a simple convolutional neural network suitable for handling MNIST digit images (28x28 grayscale):

    +
    vgsl_spec = "None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10"
    +
    +
    +
  3. Build and View the Model:

    +

    Initialize the VGSLModelGenerator and use it to build the model based on the VGSL spec string:

    +
    vgsl_gn = VGSLModelGenerator(backend="tensorflow")  # Set backend to TensorFlow
    +model = vgsl_gn.generate_model(vgsl_spec)
    +model.summary()  # View the model architecture
    +
    +
    +

    This will generate the model and display a summary of its architecture, including all layers defined by the VGSL spec string.

    +
+
+
+

Explanation of Layers

+

Let’s break down the layers defined by the VGSL specification string in our example:

+
    +
  • Input Layer: None,28,28,1 +- This defines the input shape of the model, which corresponds to grayscale images of size 28x28 pixels. The first dimension (None) allows for a variable batch size.

  • +
  • Convolutional Layer: Cr3,3,32 +- This adds a 2D convolutional layer with a 3x3 kernel and 32 output filters, using ReLU activation (r for ReLU).

  • +
  • MaxPooling Layer: Mp2,2,2,2 +- This reduces the spatial dimensions by applying 2x2 max pooling with a stride of 2x2, which downsamples the input by taking the maximum value over each 2x2 window.

  • +
  • Reshape Layer: Rc2 +- Reshapes the output from the previous layer, collapsing the spatial dimensions into a single vector suitable for fully connected layers.

  • +
  • Fully Connected Layer: Fr64 +- Adds a fully connected layer (dense layer) with 64 units, using ReLU activation (r for ReLU).

  • +
  • Dropout Layer: D20 +- Applies dropout with a 20% rate to prevent overfitting by randomly setting a portion of the inputs to zero during training.

  • +
  • Output Layer: Fs10 +- Represents the output layer with 10 units (for 10 classes, such as the digits in MNIST) using softmax activation.

  • +
+

This VGSL string provides a concise, human-readable format for specifying complex model architectures. VGSLify automatically translates this specification into a deep learning model that can be trained using TensorFlow.
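
To go from the generated model to a trained one, the standard Keras workflow applies. The following is a minimal sketch, assuming TensorFlow is installed and model is the output of generate_model() from the steps above; the optimizer and epoch count are illustrative choices, and the loss matches the integer MNIST labels and the softmax output layer (Fs10):

import tensorflow as tf

# Load MNIST, scale pixels to [0, 1], and add a channel axis to match None,28,28,1
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train[..., None] / 255.0
x_test = x_test[..., None] / 255.0

# Integer labels with a softmax output -> sparse categorical cross-entropy
model.compile(optimizer="adam",
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])

model.fit(x_train, y_train, epochs=5, validation_data=(x_test, y_test))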

+
+
+

Next Steps

+

Once you’ve built and explored a basic model, you can dive deeper into VGSLify’s capabilities. Follow the tutorials to explore more advanced use cases such as:

+
    +
  • Using different VGSL spec strings to define custom architectures.

  • +
  • Switching between TensorFlow and PyTorch backends (PyTorch support coming soon).

  • +
  • Integrating VGSLify models into larger deep learning workflows.

  • +
+

Check out the API reference for detailed information on all available classes, methods, and utilities in VGSLify.

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 0000000..555a9bf --- /dev/null +++ b/index.html @@ -0,0 +1,235 @@ + + + + + + + + + VGSLify Documentation — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

VGSLify Documentation

+

VGSLify is a powerful tool for creating and managing models using the Variable-size Graph Specification Language (VGSL). It offers flexibility by supporting both TensorFlow and (planned) PyTorch backends, making it suitable for various deep learning workflows.

+

This documentation provides a comprehensive guide to getting started with VGSLify, in-depth tutorials, and detailed API references for developers.

+
+

Getting Started

+

New to VGSLify? Follow these guides to install and start building models with VGSL:

+ +
+
+

User Guides

+

Explore advanced usage, examples, and details on the VGSL specification:

+ +
+
+

API Reference

+

For developers looking for more technical details, the following API documentation will help you understand and use the library:

+ +
+
+
+

Indices and tables

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/installation.html b/installation.html new file mode 100644 index 0000000..4096fbd --- /dev/null +++ b/installation.html @@ -0,0 +1,185 @@ + + + + + + + + + Installation — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installation

+
+

Prerequisites

+

Before installing VGSLify, make sure your system meets the following requirements:

+
    +
  • Python Version: VGSLify requires Python 3.8 or newer. Ensure that you have the correct version installed by running the following command:

    +
    python --version
    +
    +
    +
  • +
  • Required Packages:

    +
      +
    • pip: Python’s package manager is required to install VGSLify and its dependencies.

    • +
    • TensorFlow: If you are using VGSLify with TensorFlow, you will need to install TensorFlow as a backend.

    • +
    • PyTorch: PyTorch support is planned for future releases, but is not yet available in the current version.

    • +
    +
  • +
  • VGSLify is BYOB (Bring Your Own Backend): VGSLify itself does not include a deep learning framework. Users must install their preferred backend: TensorFlow for now, with PyTorch support planned for future releases. This approach gives you flexibility in choosing your backend.

  • +
+
+
+

Installing VGSLify

+

You can install VGSLify in several ways, depending on whether you want the stable release or a development version.

+
    +
  1. Install the latest version via pip:

    +

    The easiest way to get VGSLify is by using pip. Run the following command in your terminal:

    +
    pip install vgslify
    +
    +
    +
  2. Install TensorFlow Backend:

    +

    VGSLify is a BYOB package, which means you will need to install a backend separately. If you want to use TensorFlow as the backend, you can install it with the following command:

    +
    pip install tensorflow
    +
    +
    +
  3. Install the Development Version from Source:

    +

    If you want to work with the development version or modify VGSLify, you can install it directly from the source repository. Follow these steps:

    +
    git clone https://github.com/TimKoornstra/vgslify.git
    +cd vgslify
    +pip install .
    +
    +
    +

    This will install VGSLify and all of its dependencies in your environment.

    +
+
+
+

Verifying Installation

+

After installation, you can verify that VGSLify has been successfully installed and is functioning correctly by running the following command:

+
python -c "import vgslify; print(vgslify.__version__)"
+
+
+

This should print the installed version of VGSLify without any errors. If the version is displayed correctly, the installation is successful.
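
As an optional smoke test (this assumes you have also installed the TensorFlow backend; the spec string is the example from the Getting Started guide), you can build a small model end to end:

python -c "from vgslify.generator import VGSLModelGenerator; \
VGSLModelGenerator(backend='tensorflow').generate_model('None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10').summary()"

If this prints a layer-by-layer model summary, VGSLify and the backend are working together correctly.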

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/introduction.html b/introduction.html new file mode 100644 index 0000000..58549d9 --- /dev/null +++ b/introduction.html @@ -0,0 +1,176 @@ + + + + + + + + + Introduction — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Introduction

+
+

Overview of VGSLify

+

VGSLify is a toolkit designed to simplify the creation, training, and interpretation of deep learning models through the use of the Variable-size Graph Specification Language (VGSL). VGSL, originally developed for Tesseract OCR, provides a compact and flexible way to define neural network architectures in string format. VGSLify builds on this idea and adds support for modern deep learning frameworks like TensorFlow (with PyTorch planned for future versions), offering a user-friendly interface to create and manage neural network models.

+
+

What is VGSLify?

+

VGSLify leverages the power of VGSL to let users define neural networks using simple, compact strings that specify layers, their configurations, and connections. This approach eliminates the need for verbose, complex code when defining model architectures, speeding up design iteration, experimentation, and deployment. With VGSLify, you can quickly prototype models and convert between VGSL strings and executable code in deep learning frameworks.

+

VGSLify abstracts away the complexities of framework-specific syntax, allowing users to focus on model architecture and training. By supporting both TensorFlow and PyTorch (planned), it ensures flexibility for users who might prefer one framework over the other.

+
+
+

Key Features

+

VGSLify offers several key features to help streamline the process of deep learning model development:

+
    +
  • Supports TensorFlow and (planned) PyTorch backends: VGSLify currently works with TensorFlow, with PyTorch support planned in future releases.

  • +
  • Flexible model specification with VGSL: VGSL is a compact language that allows for the definition of models with just a string, simplifying architecture description. Users can specify layers, input shapes, activations, and more in a single line.

  • +
  • Easy conversion between VGSL specs and code: VGSLify offers utilities to convert VGSL strings into fully functional TensorFlow models, making it easy to go from abstract model definitions to trainable models. It also includes tools for converting trained models back into VGSL spec strings for easy sharing and reproduction (see the sketch after this list).

  • +
+
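
A minimal sketch of both directions, assuming TensorFlow is installed (the spec string is the example used in the Getting Started guide):

from vgslify.generator import VGSLModelGenerator
from vgslify.utils import model_to_spec

# Spec string -> TensorFlow model
model = VGSLModelGenerator(backend="tensorflow").generate_model(
    "None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10"
)

# Built (or trained) model -> spec string
print(model_to_spec(model))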
+
+

Target Audience

+

VGSLify is aimed at data scientists, researchers, and developers who need a concise and flexible way to define, experiment with, and manage deep learning models. Whether you’re a beginner looking for an easier way to get started with neural networks or an experienced developer seeking a faster way to prototype architectures, VGSLify provides a powerful and intuitive toolset.

+
+
+
+

Why Use VGSLify?

+

VGSLify is designed to streamline the model creation process, helping users avoid common pain points in deep learning development:

+
    +
  • Reduces boilerplate code for defining models: Instead of writing hundreds of lines of code to define your architecture, VGSLify allows you to express it in a single string.

  • +
  • Streamlines model design, training, and evaluation: The compact VGSL string format makes it easy to modify architectures, test different configurations, and train models without needing to refactor large amounts of code.

  • +
  • Facilitates collaboration and reproducibility: VGSLify allows users to share models in a concise, human-readable format, making it easier to reproduce results across different machines or by different users.

  • +
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/objects.inv b/objects.inv new file mode 100644 index 0000000..5e77e5b Binary files /dev/null and b/objects.inv differ diff --git a/py-modindex.html b/py-modindex.html new file mode 100644 index 0000000..59e6f81 --- /dev/null +++ b/py-modindex.html @@ -0,0 +1,217 @@ + + + + + + + + Python Module Index — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + +

Python Module Index

+ +
+ v +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
 
+ v
+ vgslify +
    + vgslify.core +
    + vgslify.core.config +
    + vgslify.core.factory +
    + vgslify.core.parser +
    + vgslify.core.utils +
    + vgslify.generator +
    + vgslify.parsers +
    + vgslify.parsers.base_parser +
    + vgslify.parsers.tf_parser +
    + vgslify.parsers.torch_parser +
    + vgslify.tensorflow +
    + vgslify.tensorflow.layers +
    + vgslify.torch +
    + vgslify.torch.layers +
    + vgslify.utils +
    + vgslify.utils.model_to_spec +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/search.html b/search.html new file mode 100644 index 0000000..f787e1c --- /dev/null +++ b/search.html @@ -0,0 +1,137 @@ + + + + + + + + Search — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+ + + + +
+ +
+ +
+
+ +
+
+
+
+ + + + + + + + + \ No newline at end of file diff --git a/searchindex.js b/searchindex.js new file mode 100644 index 0000000..a655c08 --- /dev/null +++ b/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({"alltitles": {"API Reference": [[2, "api-reference"]], "API Reference:": [[2, null]], "Conv2D Layer": [[13, "conv2d-layer"]], "Converting Models Back to VGSL Spec": [[0, null]], "Dense (Fully Connected) Layer": [[13, "dense-fully-connected-layer"]], "Dropout Layer": [[13, "dropout-layer"]], "Example: Convert a Model to VGSL Spec": [[0, "example-convert-a-model-to-vgsl-spec"]], "Explanation of Layers": [[1, "explanation-of-layers"]], "Getting Started": [[1, null], [2, "getting-started"]], "Getting Started:": [[2, null]], "How It Works": [[0, "how-it-works"]], "Indices and tables": [[2, "indices-and-tables"]], "Input Layer": [[13, "input-layer"]], "Installation": [[3, null]], "Installing VGSLify": [[3, "installing-vgslify"]], "Introduction": [[4, null]], "Key Features": [[4, "key-features"]], "Layer Specifications": [[13, "layer-specifications"]], "Links to Documentation": [[4, "links-to-documentation"]], "Module contents": [[6, "module-vgslify"], [7, "module-vgslify.core"], [8, "module-contents"], [9, "module-vgslify.parsers"], [10, "module-vgslify.tensorflow"], [11, "module-vgslify.torch"], [12, "module-vgslify.utils"]], "More Examples": [[13, "more-examples"]], "Next Steps": [[1, "next-steps"]], "Output Layer": [[13, "output-layer"]], "Overview": [[1, "overview"], [14, "overview"], [14, "id1"]], "Overview of VGSLify": [[4, "overview-of-vgslify"]], "Pooling2D Layer": [[13, "pooling2d-layer"]], "Prerequisites": [[3, "prerequisites"]], "RNN Layer (LSTM/GRU/Bidirectional)": [[13, "rnn-layer-lstm-gru-bidirectional"]], "Reshape Layer": [[13, "reshape-layer"]], "Saving and Reusing VGSL Spec": [[0, "saving-and-reusing-vgsl-spec"]], "Simple Example: Building a Model": [[1, "simple-example-building-a-model"]], "Step-by-Step Instructions": [[14, "step-by-step-instructions"], [14, "id2"]], "Submodules": [[6, "submodules"], [7, "submodules"], [8, "submodules"], [9, "submodules"], [10, "submodules"], [11, "submodules"], [12, "submodules"]], "Subpackages": [[6, "subpackages"]], "Supported Layers": [[13, null]], "Target Audience": [[4, "target-audience"]], "Tutorial 1: Building a CNN for Image Classification": [[14, "tutorial-1-building-a-cnn-for-image-classification"]], "Tutorial 2: Creating an LSTM for Sequence Prediction": [[14, "tutorial-2-creating-an-lstm-for-sequence-prediction"]], "Tutorials": [[14, null]], "User Guides": [[2, "user-guides"]], "User Guides:": [[2, null]], "VGSLify Documentation": [[2, null]], "Verifying Installation": [[3, "verifying-installation"]], "What is VGSLify?": [[4, "what-is-vgslify"]], "What is a VGSL Specification?": [[1, "what-is-a-vgsl-specification"]], "Why Use VGSLify?": [[4, "why-use-vgslify"]], "vgslify": [[5, null]], "vgslify package": [[6, null]], "vgslify.core package": [[7, null]], "vgslify.core.config module": [[7, "module-vgslify.core.config"]], "vgslify.core.factory module": [[7, "module-vgslify.core.factory"]], "vgslify.core.parser module": [[7, "module-vgslify.core.parser"]], "vgslify.core.utils module": [[7, "module-vgslify.core.utils"]], "vgslify.generator module": [[6, "module-vgslify.generator"]], "vgslify.parser module": [[6, "vgslify-parser-module"]], "vgslify.parser package": [[8, null]], "vgslify.parser.tf_parser module": [[8, "vgslify-parser-tf-parser-module"]], "vgslify.parsers package": [[9, null]], "vgslify.parsers.base_parser module": [[9, 
"module-vgslify.parsers.base_parser"]], "vgslify.parsers.tf_parser module": [[9, "module-vgslify.parsers.tf_parser"]], "vgslify.parsers.torch_parser module": [[9, "module-vgslify.parsers.torch_parser"]], "vgslify.tensorflow package": [[10, null]], "vgslify.tensorflow.layers module": [[10, "module-vgslify.tensorflow.layers"]], "vgslify.torch package": [[11, null]], "vgslify.torch.layers module": [[11, "module-vgslify.torch.layers"]], "vgslify.utils package": [[12, null]], "vgslify.utils.model_to_spec module": [[12, "module-vgslify.utils.model_to_spec"]]}, "docnames": ["convert_to_vgsl_spec", "getting_started", "index", "installation", "introduction", "source/modules", "source/vgslify", "source/vgslify.core", "source/vgslify.parser", "source/vgslify.parsers", "source/vgslify.tensorflow", "source/vgslify.torch", "source/vgslify.utils", "supported_layers", "tutorials"], "envversion": {"sphinx": 64, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1}, "filenames": ["convert_to_vgsl_spec.rst", "getting_started.rst", "index.rst", "installation.rst", "introduction.rst", "source/modules.rst", "source/vgslify.rst", "source/vgslify.core.rst", "source/vgslify.parser.rst", "source/vgslify.parsers.rst", "source/vgslify.tensorflow.rst", "source/vgslify.torch.rst", "source/vgslify.utils.rst", "supported_layers.rst", "tutorials.rst"], "indexentries": {"_input_shape (vgslify.core.factory.layerfactory attribute)": [[7, "vgslify.core.factory.LayerFactory._input_shape", false]], "_input_shape (vgslify.tensorflow.layers.tensorflowlayerfactory attribute)": [[10, "vgslify.tensorflow.layers.TensorFlowLayerFactory._input_shape", false]], "_input_shape (vgslify.torch.layers.torchlayerfactory attribute)": [[11, "vgslify.torch.layers.TorchLayerFactory._input_shape", false]], "activation (vgslify.core.config.activationconfig attribute)": [[7, "vgslify.core.config.ActivationConfig.activation", false]], "activation (vgslify.core.config.conv2dconfig attribute)": [[7, "vgslify.core.config.Conv2DConfig.activation", false]], "activation (vgslify.core.config.denseconfig attribute)": [[7, "vgslify.core.config.DenseConfig.activation", false]], "activation() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.activation", false]], "activationconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.ActivationConfig", false]], "basemodelparser (class in vgslify.parsers.base_parser)": [[9, "vgslify.parsers.base_parser.BaseModelParser", false]], "batch_size (vgslify.core.config.inputconfig attribute)": [[7, "vgslify.core.config.InputConfig.batch_size", false]], "batchnorm() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.batchnorm", false]], "bidirectional (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.bidirectional", false]], "build() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.build", false]], "build() (vgslify.tensorflow.layers.tensorflowlayerfactory method)": [[10, "vgslify.tensorflow.layers.TensorFlowLayerFactory.build", false]], "build() (vgslify.torch.layers.torchlayerfactory method)": [[11, "vgslify.torch.layers.TorchLayerFactory.build", false]], "channels (vgslify.core.config.inputconfig attribute)": [[7, 
"vgslify.core.config.InputConfig.channels", false]], "construct_layer() (vgslify.generator.vgslmodelgenerator method)": [[6, "vgslify.generator.VGSLModelGenerator.construct_layer", false]], "conv2d() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.conv2d", false]], "conv2dconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.Conv2DConfig", false]], "data_format (vgslify.core.factory.layerfactory attribute)": [[7, "vgslify.core.factory.LayerFactory.data_format", false]], "dense() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.dense", false]], "denseconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.DenseConfig", false]], "depth (vgslify.core.config.inputconfig attribute)": [[7, "vgslify.core.config.InputConfig.depth", false]], "dropout (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.dropout", false]], "dropout() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.dropout", false]], "dropoutconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.DropoutConfig", false]], "filters (vgslify.core.config.conv2dconfig attribute)": [[7, "vgslify.core.config.Conv2DConfig.filters", false]], "flatten() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.flatten", false]], "generate_history() (vgslify.generator.vgslmodelgenerator method)": [[6, "vgslify.generator.VGSLModelGenerator.generate_history", false]], "generate_model() (vgslify.generator.vgslmodelgenerator method)": [[6, "vgslify.generator.VGSLModelGenerator.generate_model", false]], "generate_vgsl() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.generate_vgsl", false]], "get_activation_function() (in module vgslify.core.utils)": [[7, "vgslify.core.utils.get_activation_function", false]], "go_backwards (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.go_backwards", false]], "height (vgslify.core.config.inputconfig attribute)": [[7, "vgslify.core.config.InputConfig.height", false]], "input() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.input", false]], "inputconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.InputConfig", false]], "kernel_size (vgslify.core.config.conv2dconfig attribute)": [[7, "vgslify.core.config.Conv2DConfig.kernel_size", false]], "layer_parsers (vgslify.parsers.tf_parser.tensorflowmodelparser attribute)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.layer_parsers", false]], "layer_parsers (vgslify.parsers.torch_parser.torchmodelparser attribute)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.layer_parsers", false]], "layerfactory (class in vgslify.core.factory)": [[7, "vgslify.core.factory.LayerFactory", false]], "layers (vgslify.core.factory.layerfactory attribute)": [[7, "vgslify.core.factory.LayerFactory.layers", false]], "layers (vgslify.tensorflow.layers.tensorflowlayerfactory attribute)": [[10, "vgslify.tensorflow.layers.TensorFlowLayerFactory.layers", false]], "layers (vgslify.torch.layers.torchlayerfactory attribute)": [[11, "vgslify.torch.layers.TorchLayerFactory.layers", false]], "model_to_spec() (in module vgslify.utils.model_to_spec)": [[12, "vgslify.utils.model_to_spec.model_to_spec", false]], "module": [[6, "module-vgslify", false], [6, "module-vgslify.generator", false], [7, "module-vgslify.core", false], [7, "module-vgslify.core.config", false], 
[7, "module-vgslify.core.factory", false], [7, "module-vgslify.core.parser", false], [7, "module-vgslify.core.utils", false], [9, "module-vgslify.parsers", false], [9, "module-vgslify.parsers.base_parser", false], [9, "module-vgslify.parsers.tf_parser", false], [9, "module-vgslify.parsers.torch_parser", false], [10, "module-vgslify.tensorflow", false], [10, "module-vgslify.tensorflow.layers", false], [11, "module-vgslify.torch", false], [11, "module-vgslify.torch.layers", false], [12, "module-vgslify.utils", false], [12, "module-vgslify.utils.model_to_spec", false]], "parse_activation() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_activation", false]], "parse_activation() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_activation", false]], "parse_activation() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_activation", false]], "parse_activation_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_activation_spec", false]], "parse_batchnorm() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_batchnorm", false]], "parse_batchnorm() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_batchnorm", false]], "parse_batchnorm() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_batchnorm", false]], "parse_conv2d() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_conv2d", false]], "parse_conv2d() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_conv2d", false]], "parse_conv2d() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_conv2d", false]], "parse_conv2d_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_conv2d_spec", false]], "parse_dense() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_dense", false]], "parse_dense() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_dense", false]], "parse_dense() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_dense", false]], "parse_dense_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_dense_spec", false]], "parse_dropout() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_dropout", false]], "parse_dropout() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_dropout", false]], "parse_dropout() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_dropout", false]], "parse_dropout_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_dropout_spec", false]], "parse_flatten() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_flatten", false]], "parse_flatten() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, 
"vgslify.parsers.tf_parser.TensorFlowModelParser.parse_flatten", false]], "parse_flatten() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_flatten", false]], "parse_input() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_input", false]], "parse_input() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_input", false]], "parse_input() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_input", false]], "parse_input_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_input_spec", false]], "parse_model() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_model", false]], "parse_model() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_model", false]], "parse_model() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_model", false]], "parse_pooling() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_pooling", false]], "parse_pooling() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_pooling", false]], "parse_pooling() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_pooling", false]], "parse_pooling2d_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_pooling2d_spec", false]], "parse_reshape() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_reshape", false]], "parse_reshape() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_reshape", false]], "parse_reshape() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_reshape", false]], "parse_reshape_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_reshape_spec", false]], "parse_rnn() (vgslify.parsers.base_parser.basemodelparser method)": [[9, "vgslify.parsers.base_parser.BaseModelParser.parse_rnn", false]], "parse_rnn() (vgslify.parsers.tf_parser.tensorflowmodelparser method)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser.parse_rnn", false]], "parse_rnn() (vgslify.parsers.torch_parser.torchmodelparser method)": [[9, "vgslify.parsers.torch_parser.TorchModelParser.parse_rnn", false]], "parse_rnn_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_rnn_spec", false]], "parse_spec() (in module vgslify.core.parser)": [[7, "vgslify.core.parser.parse_spec", false]], "pool_size (vgslify.core.config.pooling2dconfig attribute)": [[7, "vgslify.core.config.Pooling2DConfig.pool_size", false]], "pool_type (vgslify.core.config.pooling2dconfig attribute)": [[7, "vgslify.core.config.Pooling2DConfig.pool_type", false]], "pooling2d() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.pooling2d", false]], "pooling2dconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.Pooling2DConfig", false]], "rate (vgslify.core.config.dropoutconfig attribute)": [[7, "vgslify.core.config.DropoutConfig.rate", 
false]], "recurrent_dropout (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.recurrent_dropout", false]], "reshape() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.reshape", false]], "reshapeconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.ReshapeConfig", false]], "return_sequences (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.return_sequences", false]], "rnn() (vgslify.core.factory.layerfactory method)": [[7, "vgslify.core.factory.LayerFactory.rnn", false]], "rnn_type (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.rnn_type", false]], "rnnconfig (class in vgslify.core.config)": [[7, "vgslify.core.config.RNNConfig", false]], "shape (vgslify.core.factory.layerfactory attribute)": [[7, "vgslify.core.factory.LayerFactory.shape", false]], "shape (vgslify.tensorflow.layers.tensorflowlayerfactory attribute)": [[10, "vgslify.tensorflow.layers.TensorFlowLayerFactory.shape", false]], "shape (vgslify.torch.layers.torchlayerfactory attribute)": [[11, "vgslify.torch.layers.TorchLayerFactory.shape", false]], "strides (vgslify.core.config.conv2dconfig attribute)": [[7, "vgslify.core.config.Conv2DConfig.strides", false]], "strides (vgslify.core.config.pooling2dconfig attribute)": [[7, "vgslify.core.config.Pooling2DConfig.strides", false]], "target_shape (vgslify.core.config.reshapeconfig attribute)": [[7, "vgslify.core.config.ReshapeConfig.target_shape", false]], "tensorflowlayerfactory (class in vgslify.tensorflow.layers)": [[10, "vgslify.tensorflow.layers.TensorFlowLayerFactory", false]], "tensorflowmodelparser (class in vgslify.parsers.tf_parser)": [[9, "vgslify.parsers.tf_parser.TensorFlowModelParser", false]], "torchlayerfactory (class in vgslify.torch.layers)": [[11, "vgslify.torch.layers.TorchLayerFactory", false]], "torchmodelparser (class in vgslify.parsers.torch_parser)": [[9, "vgslify.parsers.torch_parser.TorchModelParser", false]], "units (vgslify.core.config.denseconfig attribute)": [[7, "vgslify.core.config.DenseConfig.units", false]], "units (vgslify.core.config.rnnconfig attribute)": [[7, "vgslify.core.config.RNNConfig.units", false]], "vgslify": [[6, "module-vgslify", false]], "vgslify.core": [[7, "module-vgslify.core", false]], "vgslify.core.config": [[7, "module-vgslify.core.config", false]], "vgslify.core.factory": [[7, "module-vgslify.core.factory", false]], "vgslify.core.parser": [[7, "module-vgslify.core.parser", false]], "vgslify.core.utils": [[7, "module-vgslify.core.utils", false]], "vgslify.generator": [[6, "module-vgslify.generator", false]], "vgslify.parsers": [[9, "module-vgslify.parsers", false]], "vgslify.parsers.base_parser": [[9, "module-vgslify.parsers.base_parser", false]], "vgslify.parsers.tf_parser": [[9, "module-vgslify.parsers.tf_parser", false]], "vgslify.parsers.torch_parser": [[9, "module-vgslify.parsers.torch_parser", false]], "vgslify.tensorflow": [[10, "module-vgslify.tensorflow", false]], "vgslify.tensorflow.layers": [[10, "module-vgslify.tensorflow.layers", false]], "vgslify.torch": [[11, "module-vgslify.torch", false]], "vgslify.torch.layers": [[11, "module-vgslify.torch.layers", false]], "vgslify.utils": [[12, "module-vgslify.utils", false]], "vgslify.utils.model_to_spec": [[12, "module-vgslify.utils.model_to_spec", false]], "vgslmodelgenerator (class in vgslify.generator)": [[6, "vgslify.generator.VGSLModelGenerator", false]], "width (vgslify.core.config.inputconfig attribute)": [[7, 
"vgslify.core.config.InputConfig.width", false]]}, "objects": {"": [[6, 0, 0, "-", "vgslify"]], "vgslify": [[7, 0, 0, "-", "core"], [6, 0, 0, "-", "generator"], [9, 0, 0, "-", "parsers"], [10, 0, 0, "-", "tensorflow"], [11, 0, 0, "-", "torch"], [12, 0, 0, "-", "utils"]], "vgslify.core": [[7, 0, 0, "-", "config"], [7, 0, 0, "-", "factory"], [7, 0, 0, "-", "parser"], [7, 0, 0, "-", "utils"]], "vgslify.core.config": [[7, 1, 1, "", "ActivationConfig"], [7, 1, 1, "", "Conv2DConfig"], [7, 1, 1, "", "DenseConfig"], [7, 1, 1, "", "DropoutConfig"], [7, 1, 1, "", "InputConfig"], [7, 1, 1, "", "Pooling2DConfig"], [7, 1, 1, "", "RNNConfig"], [7, 1, 1, "", "ReshapeConfig"]], "vgslify.core.config.ActivationConfig": [[7, 2, 1, "", "activation"]], "vgslify.core.config.Conv2DConfig": [[7, 2, 1, "", "activation"], [7, 2, 1, "", "filters"], [7, 2, 1, "", "kernel_size"], [7, 2, 1, "", "strides"]], "vgslify.core.config.DenseConfig": [[7, 2, 1, "", "activation"], [7, 2, 1, "", "units"]], "vgslify.core.config.DropoutConfig": [[7, 2, 1, "", "rate"]], "vgslify.core.config.InputConfig": [[7, 2, 1, "", "batch_size"], [7, 2, 1, "", "channels"], [7, 2, 1, "", "depth"], [7, 2, 1, "", "height"], [7, 2, 1, "", "width"]], "vgslify.core.config.Pooling2DConfig": [[7, 2, 1, "", "pool_size"], [7, 2, 1, "", "pool_type"], [7, 2, 1, "", "strides"]], "vgslify.core.config.RNNConfig": [[7, 2, 1, "", "bidirectional"], [7, 2, 1, "", "dropout"], [7, 2, 1, "", "go_backwards"], [7, 2, 1, "", "recurrent_dropout"], [7, 2, 1, "", "return_sequences"], [7, 2, 1, "", "rnn_type"], [7, 2, 1, "", "units"]], "vgslify.core.config.ReshapeConfig": [[7, 2, 1, "", "target_shape"]], "vgslify.core.factory": [[7, 1, 1, "", "LayerFactory"]], "vgslify.core.factory.LayerFactory": [[7, 2, 1, "", "_input_shape"], [7, 3, 1, "", "activation"], [7, 3, 1, "", "batchnorm"], [7, 3, 1, "", "build"], [7, 3, 1, "", "conv2d"], [7, 2, 1, "", "data_format"], [7, 3, 1, "", "dense"], [7, 3, 1, "", "dropout"], [7, 3, 1, "", "flatten"], [7, 3, 1, "", "input"], [7, 2, 1, "", "layers"], [7, 3, 1, "", "pooling2d"], [7, 3, 1, "", "reshape"], [7, 3, 1, "", "rnn"], [7, 2, 1, "", "shape"]], "vgslify.core.parser": [[7, 4, 1, "", "parse_activation_spec"], [7, 4, 1, "", "parse_conv2d_spec"], [7, 4, 1, "", "parse_dense_spec"], [7, 4, 1, "", "parse_dropout_spec"], [7, 4, 1, "", "parse_input_spec"], [7, 4, 1, "", "parse_pooling2d_spec"], [7, 4, 1, "", "parse_reshape_spec"], [7, 4, 1, "", "parse_rnn_spec"], [7, 4, 1, "", "parse_spec"]], "vgslify.core.utils": [[7, 4, 1, "", "get_activation_function"]], "vgslify.generator": [[6, 1, 1, "", "VGSLModelGenerator"]], "vgslify.generator.VGSLModelGenerator": [[6, 3, 1, "", "construct_layer"], [6, 3, 1, "", "generate_history"], [6, 3, 1, "", "generate_model"]], "vgslify.parsers": [[9, 0, 0, "-", "base_parser"], [9, 0, 0, "-", "tf_parser"], [9, 0, 0, "-", "torch_parser"]], "vgslify.parsers.base_parser": [[9, 1, 1, "", "BaseModelParser"]], "vgslify.parsers.base_parser.BaseModelParser": [[9, 3, 1, "", "generate_vgsl"], [9, 3, 1, "", "parse_activation"], [9, 3, 1, "", "parse_batchnorm"], [9, 3, 1, "", "parse_conv2d"], [9, 3, 1, "", "parse_dense"], [9, 3, 1, "", "parse_dropout"], [9, 3, 1, "", "parse_flatten"], [9, 3, 1, "", "parse_input"], [9, 3, 1, "", "parse_model"], [9, 3, 1, "", "parse_pooling"], [9, 3, 1, "", "parse_reshape"], [9, 3, 1, "", "parse_rnn"]], "vgslify.parsers.tf_parser": [[9, 1, 1, "", "TensorFlowModelParser"]], "vgslify.parsers.tf_parser.TensorFlowModelParser": [[9, 2, 1, "", "layer_parsers"], [9, 3, 1, "", "parse_activation"], [9, 
3, 1, "", "parse_batchnorm"], [9, 3, 1, "", "parse_conv2d"], [9, 3, 1, "", "parse_dense"], [9, 3, 1, "", "parse_dropout"], [9, 3, 1, "", "parse_flatten"], [9, 3, 1, "", "parse_input"], [9, 3, 1, "", "parse_model"], [9, 3, 1, "", "parse_pooling"], [9, 3, 1, "", "parse_reshape"], [9, 3, 1, "", "parse_rnn"]], "vgslify.parsers.torch_parser": [[9, 1, 1, "", "TorchModelParser"]], "vgslify.parsers.torch_parser.TorchModelParser": [[9, 2, 1, "", "layer_parsers"], [9, 3, 1, "", "parse_activation"], [9, 3, 1, "", "parse_batchnorm"], [9, 3, 1, "", "parse_conv2d"], [9, 3, 1, "", "parse_dense"], [9, 3, 1, "", "parse_dropout"], [9, 3, 1, "", "parse_flatten"], [9, 3, 1, "", "parse_input"], [9, 3, 1, "", "parse_model"], [9, 3, 1, "", "parse_pooling"], [9, 3, 1, "", "parse_reshape"], [9, 3, 1, "", "parse_rnn"]], "vgslify.tensorflow": [[10, 0, 0, "-", "layers"]], "vgslify.tensorflow.layers": [[10, 1, 1, "", "TensorFlowLayerFactory"]], "vgslify.tensorflow.layers.TensorFlowLayerFactory": [[10, 2, 1, "", "_input_shape"], [10, 3, 1, "", "build"], [10, 2, 1, "", "layers"], [10, 2, 1, "", "shape"]], "vgslify.torch": [[11, 0, 0, "-", "layers"]], "vgslify.torch.layers": [[11, 1, 1, "", "TorchLayerFactory"]], "vgslify.torch.layers.TorchLayerFactory": [[11, 2, 1, "", "_input_shape"], [11, 3, 1, "", "build"], [11, 2, 1, "", "layers"], [11, 2, 1, "", "shape"]], "vgslify.utils": [[12, 0, 0, "-", "model_to_spec"]], "vgslify.utils.model_to_spec": [[12, 4, 1, "", "model_to_spec"]]}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"], "4": ["py", "function", "Python function"]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method", "4": "py:function"}, "terms": {"": [0, 1, 3, 4, 7, 13, 14], "0": [7, 13, 14], "000": 14, "1": [1, 2, 7, 13], "10": [1, 13, 14], "100": [7, 13], "1000": 14, "128": [7, 14], "1d": [7, 13], "1x1": 13, "2": [1, 2, 7, 13], "20": [1, 14], "224": 7, "25": [7, 13, 14], "255": 14, "28": [1, 7, 13], "28x28": 1, "2d": [1, 7, 13], "2x2": [1, 13, 14], "3": [1, 3, 7, 13, 14], "32": [1, 7, 13, 14], "32x32": 14, "3d": [7, 13], "3x3": [1, 13, 14], "4096": 13, "4d": 7, "5": 7, "50": [13, 14], "6": 14, "60": 14, "64": [1, 7, 13, 14], "7": 7, "8": [3, 13], "A": [1, 6, 7, 9, 10, 11], "By": [0, 4, 14], "For": [1, 2, 7, 14], "If": [3, 6, 7, 9, 10, 11, 12, 13], "In": [1, 14], "It": [2, 4, 7, 9, 13], "One": 7, "The": [0, 1, 3, 4, 6, 7, 9, 10, 11, 12, 13, 14], "To": 4, "With": 4, "__version__": 3, "_conv2d": 7, "_input_shap": [6, 7, 10, 11], "abc": [7, 9], "abil": 0, "about": 1, "abov": 0, "abstract": [1, 4, 7, 9], "accumul": [10, 11], "accuraci": 14, "across": 4, "activ": [1, 4, 6, 7, 9, 10, 11, 13, 14], "activation_char": 7, "activationconfig": [6, 7, 9], "actual": 7, "ad": [9, 10, 11], "adam": 14, "add": [1, 4, 13], "addit": [9, 13], "adjust": 14, "advanc": [1, 2, 4, 13], "after": [0, 3, 14], "aim": 4, "all": [1, 3], "allow": [1, 4, 13, 14], "also": [4, 7, 14], "amount": 4, "an": [0, 1, 2, 4, 7, 9, 13], "analyz": 0, "ani": [0, 3, 6, 7], "ap": [7, 13], "api": [1, 4], "append": 14, "appli": [1, 13], "approach": [3, 4], "ar": [3, 7, 13], "arang": 14, "architectur": [0, 1, 4, 6, 7, 14], "arrai": 14, "assum": 7, "auto": 6, "automat": 1, "avail": [1, 3], "averag": [7, 9, 13], "averagepooling2d": 9, "avgpool2d": 9, "avoid": 4, "awai": 4, "axi": 14, "b": [7, 13], "back": [2, 4], "backend": [0, 1, 2, 3, 4, 6, 14], "backward": 7, "base": [1, 6, 7, 9, 10, 11, 14], 
"basemodelpars": 9, "basic": 1, "batch": [1, 7, 10, 11, 13, 14], "batch_siz": [6, 7, 13, 14], "batchnorm": [6, 7, 9], "batchnorm2d": 9, "been": [3, 10, 11], "befor": 3, "begin": [1, 4, 14], "beginn": 4, "between": [1, 4], "beyond": 9, "bidirect": [6, 7, 9], "bl128": 7, "bn": [7, 9], "boilerpl": 4, "bool": 7, "both": [1, 2, 4], "break": 1, "bring": 3, "build": [0, 2, 4, 6, 7, 10, 11], "built": [1, 6, 7], "byob": 3, "c": [3, 7, 13], "calcul": 7, "callabl": 9, "can": [0, 1, 3, 4, 7, 9, 12, 13, 14], "cannot": [9, 12], "capabl": [1, 4], "case": 1, "categorical_crossentropi": 14, "cd": 3, "chain": 6, "channel": [6, 7, 13, 14], "channels_first": 7, "channels_last": 7, "charact": 7, "check": 1, "choos": 3, "cifar": 14, "cifar10": 14, "class": [1, 4, 6, 7, 9, 10, 11, 13, 14], "classif": 2, "clone": 3, "cnn": 2, "code": [1, 4, 7], "collabor": 4, "collaps": [1, 7, 13], "color": 14, "com": 3, "come": 1, "command": 3, "common": [4, 7, 9], "commonli": 14, "compact": [1, 4], "compil": 14, "complet": 14, "complex": [0, 1, 4], "compon": 1, "comprehens": 2, "concis": [0, 1, 4, 13, 14], "concret": 7, "config": [2, 5, 6, 9], "configur": [4, 7, 9, 13], "connect": [1, 4, 6, 7, 14], "construct": [6, 10, 11], "construct_lay": [5, 6], "contain": 9, "content": [2, 5], "conv2d": [6, 7, 9], "conv2dconfig": [6, 7, 9], "convent": 7, "convers": 4, "convert": [2, 4, 9, 12, 14], "convolut": [1, 6, 7, 10, 11, 13, 14], "core": [1, 2, 5, 6], "correct": 3, "correctli": 3, "correspond": [0, 1, 7, 9], "cr3": [1, 7, 13, 14], "creat": [2, 4, 6, 7, 10, 11, 13], "create_sequ": 14, "creation": [4, 7, 10, 11, 14], "current": [3, 4, 6, 7, 10, 11], "custom": [1, 9], "d": [7, 13], "d20": [1, 14], "d25": 14, "d50": [7, 13], "data": [4, 7, 14], "data_format": [6, 7], "dataclass": 9, "dataset": 14, "deep": [0, 1, 2, 3, 4, 12, 14], "deeper": [1, 4], "def": 14, "default": [7, 10, 11, 13], "defin": [1, 4, 6, 7, 13, 14], "definit": 4, "dens": [1, 6, 7, 9, 10, 11, 14], "denseconfig": [6, 7, 9], "depend": 3, "deploy": 4, "depth": [2, 6, 7, 13], "descript": [4, 13], "design": 4, "detail": [1, 2, 4], "determin": 9, "develop": [2, 3, 4], "dict": 9, "dictionari": 9, "differ": [1, 4, 6, 7, 9], "digit": 1, "dimens": [1, 7, 13], "direct": 13, "directli": [3, 13], "displai": [1, 3], "dive": [1, 4], "doe": [3, 6, 7, 9], "down": 1, "downsampl": 1, "drop": 7, "dropout": [1, 6, 7, 9, 14], "dropoutconfig": [6, 7, 9], "dure": [1, 10, 11], "dynam": 6, "e": [7, 13], "each": [1, 6, 13], "easi": 4, "easier": 4, "easiest": 3, "easili": [0, 1], "either": 7, "elimin": 4, "enabl": 1, "encod": [1, 14], "end": 14, "ensur": [3, 4], "entir": 1, "environ": 3, "epoch": 14, "error": 3, "evalu": [4, 14], "evolv": 14, "exact": 0, "exampl": [2, 7, 12, 14], "exclud": [10, 11], "execut": 4, "exist": 0, "expand_dim": 14, "expect": [7, 14], "experi": 4, "experienc": 4, "experiment": 4, "explan": [2, 14], "explor": [1, 2, 4, 13], "express": 4, "extend": 9, "f": [0, 7, 13, 14], "facilit": 4, "factori": [2, 5, 6, 10, 11], "fals": 7, "familiar": 14, "faster": 4, "fc128": 14, "fc64": 1, "featur": [13, 14], "file": 0, "filter": [1, 6, 7, 13, 14], "final": [6, 7, 10, 11], "first": [1, 9, 13], "fit": 14, "fl1": 14, "flatten": [6, 7, 9, 14], "flexibl": [1, 2, 3, 4], "flexibli": 13, "float": 7, "flt": 7, "focu": [1, 4], "follow": [1, 2, 3, 4, 13, 14], "forecast": 14, "form": 13, "format": [0, 1, 4, 7, 13, 14], "forward": 13, "fr64": [1, 7, 13], "fraction": 7, "framework": [1, 3, 4, 7, 9], "friendli": 4, "from": [0, 1, 3, 4, 6, 7, 9, 12, 13, 14], "fs10": [1, 7, 14], "fs128": 7, "full": [6, 
7], "fulli": [1, 4, 7, 14], "function": [0, 1, 3, 4, 7, 9, 13], "futur": [1, 3, 4, 14], "g": [7, 13], "gener": [0, 1, 2, 4, 5, 9, 14], "generate_histori": [5, 6], "generate_model": [0, 1, 5, 6, 14], "generate_sine_wav": 14, "generate_vgsl": 9, "get": [3, 4, 14], "get_activation_funct": [6, 7], "git": 3, "github": 3, "give": 3, "go": 4, "go_backward": [6, 7], "graph": [1, 2, 4, 6, 9, 10, 11], "grayscal": [1, 13], "gru": [7, 9], "guid": [4, 14], "h5": 12, "ha": [3, 10, 11, 13], "hand": 14, "handl": [1, 7, 10, 11], "have": [3, 7, 10, 11, 14], "height": [6, 7, 13], "help": [2, 4], "here": [0, 14], "histori": [6, 14], "hot": 14, "how": [2, 4, 14], "html": 1, "http": 3, "human": [1, 4, 14], "hundr": 4, "hypothet": 7, "i": [0, 2, 3, 6, 7, 9, 10, 11, 12, 13, 14], "idea": 4, "imag": [1, 2, 13], "implement": [1, 7], "import": [0, 1, 3, 7, 12, 14], "includ": [0, 1, 3, 4, 6, 9, 10, 11, 13], "incredibli": 1, "index": 2, "indic": 9, "individu": 7, "inform": 1, "initi": [1, 7, 10, 11], "input": [1, 4, 6, 7, 9, 10, 11, 14], "input_shap": [7, 10, 11], "inputconfig": [6, 7, 9], "inputlay": 9, "instal": 2, "instead": 4, "int": [7, 10, 11], "integr": 1, "interfac": [4, 7], "interpret": 4, "introduct": 2, "intuit": 4, "invalid": 9, "iter": 4, "its": [0, 1, 3, 4, 13], "itself": 3, "just": 4, "kei": 1, "kera": [0, 7, 9, 10, 12, 14], "kernel": [1, 7, 13], "kernel_s": [6, 7], "l": [7, 13], "label": 14, "languag": [1, 2, 4, 6, 9, 10, 11], "larg": 4, "larger": 1, "last": 7, "later": 0, "latest": 3, "layer": [2, 4, 5, 6, 7, 9, 14], "layer_pars": 9, "layerfactori": [6, 7, 10, 11], "learn": [0, 1, 2, 3, 4, 12, 14], "legend": 14, "len": 14, "length": [7, 14], "let": [1, 4, 14], "leverag": 4, "lf50": 14, "lf64": [7, 13], "librari": [2, 14], "like": [4, 7], "line": [1, 4], "linear": [7, 9, 13, 14], "link": 2, "list": [6, 7, 9, 10, 11], "load": [0, 14], "load_data": 14, "load_model": [0, 12], "long": 14, "look": [2, 4], "loss": 14, "ls128": 7, "lstm": [2, 7, 9], "m": [7, 13], "machin": 4, "make": [1, 2, 3, 4], "manag": [2, 3, 4], "map": [7, 9], "match": 7, "matplotlib": 14, "max": [1, 7, 9, 13], "maximum": 1, "maxpool": [1, 14], "maxpool2d": 9, "maxpooling2d": 9, "mean": 3, "mean_squared_error": 14, "meet": 3, "memori": 14, "method": [1, 4, 6, 7, 9], "metric": 14, "might": 4, "mnist": 1, "model": [2, 4, 6, 7, 9, 10, 11, 12, 13, 14], "model_nam": 6, "model_spec": [0, 6, 7], "model_to_spec": [0, 2], "modern": 4, "modifi": [3, 4], "modul": [2, 5], "more": [1, 2, 4, 6, 10, 11], "mp": [7, 13], "mp2": [1, 7, 13, 14], "must": [3, 7], "my_model": 7, "n": [7, 13], "name": [7, 10, 11], "necessari": 14, "need": [1, 3, 4, 14], "network": [1, 4, 6, 7, 14], "neural": [1, 4, 6, 7, 14], "neuron": 7, "new": [2, 9], "newer": 3, "next": [2, 14], "nn": [9, 11, 12], "non": [7, 13], "none": [1, 7, 9, 10, 11, 13, 14], "normal": 14, "note": [7, 9], "now": [0, 3, 14], "np": 14, "number": [7, 13, 14], "numpi": 14, "o": 13, "o1s10": 13, "object": [6, 7], "ocr": 4, "offer": [2, 4], "onc": [0, 1, 14], "one": [4, 14], "open": 0, "oper": [7, 13], "optim": 14, "option": [7, 10, 11, 13], "origin": [4, 10, 11], "other": 4, "our": [1, 14], "out": 1, "output": [0, 1, 6, 7, 14], "over": [1, 4, 14], "overfit": 1, "overview": [2, 13], "own": [3, 13], "p": 13, "packag": [2, 3, 5], "page": 2, "pain": 4, "paramet": [6, 7, 9, 10, 11, 12], "pars": [6, 7, 9, 10, 11, 12], "parse_activ": 9, "parse_activation_spec": [6, 7], "parse_batchnorm": 9, "parse_conv2d": 9, "parse_conv2d_spec": [6, 7], "parse_dens": 9, "parse_dense_spec": [6, 7], "parse_dropout": 9, 
"parse_dropout_spec": [6, 7], "parse_flatten": 9, "parse_input": 9, "parse_input_spec": [6, 7], "parse_model": 9, "parse_pool": 9, "parse_pooling2d_spec": [6, 7], "parse_reshap": 9, "parse_reshape_spec": [6, 7], "parse_rnn": 9, "parse_rnn_spec": [6, 7], "parse_spec": [6, 7], "parser": [2, 5], "part": 1, "path_to_model": 12, "path_to_your_model": 0, "per": 14, "percentag": [7, 13], "perform": 14, "pip": 3, "pixel": 1, "placehold": 9, "plan": [0, 1, 2, 3, 4], "plot": 14, "plt": 14, "point": 4, "pool": [1, 6, 7, 9, 10, 11, 13, 14], "pool_siz": [6, 7], "pool_typ": [6, 7, 9], "pooling2d": [6, 7], "pooling2dconfig": [6, 7, 9], "portion": 1, "power": [2, 4], "predict": 2, "prefer": [3, 4], "prefix": 7, "prepar": 14, "preprocess": 14, "prerequisit": 2, "prevent": 1, "previou": [1, 13], "previous": 0, "primari": 13, "print": [0, 3, 7, 12, 14], "privat": 7, "process": [4, 7], "project": 0, "prototyp": 4, "provid": [0, 1, 2, 4, 7, 9, 10, 11, 13, 14], "public": 7, "pyplot": 14, "python": 3, "pytorch": [0, 1, 2, 3, 4, 6, 7, 9, 11, 12], "pytorchlayerfactori": 7, "quick": 4, "quickli": [0, 4], "r": [0, 1, 7, 13], "r64": [7, 13], "rais": [6, 7, 9, 10, 11, 12], "randomli": 1, "rang": [9, 13, 14], "rate": [1, 6, 7, 13, 14], "rc": [1, 7], "rc2": [1, 13, 14], "rc3": [7, 13], "rd": [7, 13], "rd25": [7, 13], "re": 4, "read": 0, "readabl": [1, 4, 14], "rebuild": 0, "recogn": 7, "recurr": [7, 13], "recurrent_dropout": [6, 7], "reduc": [1, 4, 13], "refactor": 4, "refer": [1, 4], "releas": [1, 3, 4], "relu": [1, 7, 13, 14], "repositori": 3, "repres": [1, 7, 9], "represent": [0, 1], "reproduc": [0, 4], "reproduct": 4, "requir": [3, 9, 14], "research": 4, "reshap": [1, 6, 7, 9, 14], "reshapeconfig": [6, 7, 9], "resourc": 4, "respons": [10, 11], "result": 4, "return": [6, 7, 9, 10, 11, 12, 14], "return_sequ": [6, 7], "revers": 13, "rgb": 14, "rnn": [6, 7, 9, 10, 11], "rnn_type": [6, 7], "rnnconfig": [6, 7, 9], "run": 3, "s_x": [7, 13], "s_y": [7, 13], "same": 0, "scalar": 13, "scientist": 4, "seamless": 1, "search": 2, "second": 14, "section": [13, 14], "see": 14, "seek": 4, "separ": 3, "seq_length": 14, "sequenc": [2, 6, 7, 13], "sequenti": 6, "seri": 14, "serv": 1, "set": [1, 4, 13, 14], "sever": [3, 4], "shape": [1, 4, 6, 7, 9, 10, 11, 13, 14], "share": [0, 4], "short": 14, "should": [3, 9], "show": 14, "sigmoid": [7, 13], "simpl": [2, 4, 7, 14], "simplifi": [4, 14], "sin": 14, "sinc": 9, "sine": 14, "sine_wav": 14, "singl": [1, 4, 6], "size": [1, 2, 4, 6, 7, 9, 10, 11, 13, 14], "softmax": [1, 7, 13, 14], "someconcretelayerfactori": 7, "soon": 1, "sourc": [3, 6, 7, 9, 10, 11, 12], "spatial": [1, 7, 13], "spec": [1, 2, 4, 6, 7, 9, 12, 13, 14], "spec_str": 12, "specif": [0, 2, 4, 6, 7, 9, 10, 11, 14], "specifi": [1, 4, 6, 7, 10, 11, 13, 14], "stabl": 3, "start": 4, "state": 7, "step": [2, 3, 4], "store": [0, 6, 7], "str": [6, 7, 9, 10, 11, 12], "streamlin": 4, "stride": [1, 6, 7, 13, 14], "string": [0, 1, 4, 6, 7, 9, 12, 14], "structur": 1, "subclass": 7, "submodul": [2, 5], "subpackag": [2, 5], "success": 3, "successfulli": 3, "suitabl": [1, 2, 13], "summari": [0, 1], "support": [0, 1, 2, 3, 4, 6, 7, 9, 12], "sure": 3, "switch": 1, "syntax": [1, 4], "synthet": 14, "system": 3, "t": [7, 13], "take": [1, 6], "tanh": [7, 13], "target": [7, 13], "target_shap": [6, 7], "task": 4, "technic": 2, "tensor": [7, 10, 11, 13], "tensorflow": [0, 1, 2, 3, 4, 5, 6, 7, 9, 12, 14], "tensorflowlayerfactori": [6, 7, 10], "tensorflowmodelpars": 9, "term": 14, "termin": 3, "tesseract": 4, "test": [4, 14], "test_acc": 14, "test_loss": 
14, "tf": [9, 10, 12, 14], "tf_parser": 2, "thei": 13, "them": [6, 9], "thi": [0, 1, 2, 3, 4, 6, 7, 9, 10, 11, 13, 14], "through": [1, 4, 14], "time": [0, 14], "time_step": 13, "timkoornstra": 3, "to_categor": 14, "tool": [2, 4], "toolkit": 4, "toolset": 4, "torch": [2, 12], "torchlayerfactori": 11, "torchmodelpars": 9, "train": [0, 1, 4, 14], "trainabl": 4, "transform": 7, "transit": 13, "translat": 1, "true": [7, 14], "tupl": [7, 10, 11], "tutori": [1, 2, 4, 13], "two": 13, "txt": 0, "type": [6, 7, 9, 10, 11, 12, 13], "typic": 13, "u": 14, "underscor": 7, "understand": [0, 2], "unidirect": 7, "union": 9, "unit": [1, 6, 7, 13, 14], "unknown": 6, "unsupport": 9, "up": 4, "updat": 9, "us": [0, 1, 2, 3, 6, 7, 9, 10, 11, 13, 14], "usag": 2, "user": [3, 4], "util": [0, 1, 2, 4, 5, 6, 9, 14], "v": 14, "validation_data": 14, "valu": [1, 13, 14], "valueerror": [6, 7, 9, 10, 11, 12], "variabl": [1, 2, 4, 6, 9, 10, 11, 13], "variou": [2, 10, 11], "ve": [0, 1], "vector": 1, "verbos": [1, 4], "verifi": 2, "version": [3, 4], "vgsl": [2, 4, 6, 7, 9, 10, 11, 12, 13, 14], "vgsl_gn": [0, 1, 14], "vgsl_model": [6, 10, 11], "vgsl_spec": [0, 1, 14], "vgslifi": [0, 1, 13, 14], "vgslmodelgener": [0, 1, 5, 6, 14], "via": [0, 3], "view": 1, "visual": 14, "w": 0, "wai": [3, 4], "walk": 1, "want": 3, "wave": 14, "we": [1, 7, 14], "well": 14, "when": [4, 13], "where": [7, 13], "whether": [3, 4, 7, 13], "which": [1, 3, 13, 14], "who": 4, "why": 2, "wide": 9, "width": [6, 7, 13], "window": [1, 7, 13], "within": 1, "without": [1, 3, 4, 6, 14], "work": [2, 3, 4], "workflow": [1, 2], "worri": 1, "write": [0, 4], "x": [7, 13, 14], "x_test": 14, "x_train": 14, "y": [7, 13, 14], "y_pred": 14, "y_test": 14, "y_train": 14, "yet": 3, "you": [0, 1, 2, 3, 4, 13, 14], "your": [0, 3, 4, 7], "z": [7, 13], "zero": 1}, "titles": ["Converting Models Back to VGSL Spec", "Getting Started", "VGSLify Documentation", "Installation", "Introduction", "vgslify", "vgslify package", "vgslify.core package", "vgslify.parser package", "vgslify.parsers package", "vgslify.tensorflow package", "vgslify.torch package", "vgslify.utils package", "Supported Layers", "Tutorials"], "titleterms": {"1": 14, "2": 14, "It": 0, "an": 14, "api": 2, "audienc": 4, "back": 0, "base_pars": 9, "bidirect": 13, "build": [1, 14], "classif": 14, "cnn": 14, "config": 7, "connect": 13, "content": [6, 7, 8, 9, 10, 11, 12], "conv2d": 13, "convert": 0, "core": 7, "creat": 14, "dens": 13, "document": [2, 4], "dropout": 13, "exampl": [0, 1, 13], "explan": 1, "factori": 7, "featur": 4, "fulli": 13, "gener": 6, "get": [1, 2], "gru": 13, "guid": 2, "how": 0, "i": [1, 4], "imag": 14, "indic": 2, "input": 13, "instal": 3, "instruct": 14, "introduct": 4, "kei": 4, "layer": [1, 10, 11, 13], "link": 4, "lstm": [13, 14], "model": [0, 1], "model_to_spec": 12, "modul": [6, 7, 8, 9, 10, 11, 12], "more": 13, "next": 1, "output": 13, "overview": [1, 4, 14], "packag": [6, 7, 8, 9, 10, 11, 12], "parser": [6, 7, 8, 9], "pooling2d": 13, "predict": 14, "prerequisit": 3, "refer": 2, "reshap": 13, "reus": 0, "rnn": 13, "save": 0, "sequenc": 14, "simpl": 1, "spec": 0, "specif": [1, 13], "start": [1, 2], "step": [1, 14], "submodul": [6, 7, 8, 9, 10, 11, 12], "subpackag": 6, "support": 13, "tabl": 2, "target": 4, "tensorflow": 10, "tf_parser": [8, 9], "torch": 11, "torch_pars": 9, "tutori": 14, "us": 4, "user": 2, "util": [7, 12], "verifi": 3, "vgsl": [0, 1], "vgslifi": [2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], "what": [1, 4], "why": 4, "work": 0}}) \ No newline at end of file diff --git 
a/source/modules.html b/source/modules.html new file mode 100644 index 0000000..b1edc54 --- /dev/null +++ b/source/modules.html @@ -0,0 +1,158 @@ + + + + + + + + + vgslify — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/source/vgslify.core.html b/source/vgslify.core.html new file mode 100644 index 0000000..9f35d21 --- /dev/null +++ b/source/vgslify.core.html @@ -0,0 +1,1120 @@ + + + + + + + + + vgslify.core package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.core package

+
+

Submodules

+
+
+

vgslify.core.config module

+
+
+class vgslify.core.config.ActivationConfig(activation)[source]
+

Bases: object

+

Configuration for Activation layer.

+
+
Parameters:
+

activation (str) – Activation function to use.

+
+
+
+
+activation: str
+
+ +
+ +
+
+class vgslify.core.config.Conv2DConfig(activation, kernel_size, strides, filters)[source]
+

Bases: object

+

Configuration for 2D Convolutional layer.

+
+
Parameters:
+
    +
  • activation (str) – Activation function to use.

  • +
  • kernel_size (tuple) – Size of the convolution kernels.

  • +
  • strides (tuple) – Stride length of the convolution.

  • +
  • filters (int) – Number of output filters in the convolution.

  • +
+
+
+
+
+activation: str
+
+ +
+
+filters: int
+
+ +
+
+kernel_size: tuple
+
+ +
+
+strides: tuple
+
+ +
+ +
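
As a quick illustration, these configs are plain dataclasses and can be constructed directly, although VGSLify normally builds them for you via the parsers in vgslify.core.parser:

+
>>> from vgslify.core.config import Conv2DConfig
+>>> Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=32)
+Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=32)
+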
+
+class vgslify.core.config.DenseConfig(activation, units)[source]
+

Bases: object

+

Configuration for Dense (Fully Connected) layer.

+
+
Parameters:
+
    +
  • activation (str) – Activation function to use.

  • +
  • units (int) – Number of neurons in the dense layer.

  • +
+
+
+
+
+activation: str
+
+ +
+
+units: int
+
+ +
+ +
+
+class vgslify.core.config.DropoutConfig(rate)[source]
+

Bases: object

+

Configuration for Dropout layer.

+
+
Parameters:
+

rate (float) – Fraction of the input units to drop.

+
+
+
+
+rate: float
+
+ +
+ +
+
+class vgslify.core.config.InputConfig(batch_size, depth, height, width, channels)[source]
+

Bases: object

+

Configuration for Input layer.

+
+
Parameters:
+
    +
  • batch_size (int) – Size of the batches of data.

  • +
  • depth (int) – Depth of the input (for 3D inputs).

  • +
  • height (int) – Height of the input.

  • +
  • width (int) – Width of the input.

  • +
  • channels (int) – Number of channels in the input.

  • +
+
+
+
+
+batch_size: int
+
+ +
+
+channels: int
+
+ +
+
+depth: int
+
+ +
+
+height: int
+
+ +
+
+width: int
+
+ +
+ +
+
+class vgslify.core.config.Pooling2DConfig(pool_type, pool_size, strides)[source]
+

Bases: object

+

Configuration for 2D Pooling layer.

+
+
Parameters:
+
    +
  • pool_type (str) – Type of pooling operation (e.g., ‘max’, ‘average’).

  • +
  • pool_size (tuple) – Size of the pooling window.

  • +
  • strides (tuple) – Stride length of the pooling operation.

  • +
+
+
+
+
+pool_size: tuple
+
+ +
+
+pool_type: str
+
+ +
+
+strides: tuple
+
+ +
+ +
+
+class vgslify.core.config.RNNConfig(units, return_sequences, go_backwards, dropout, recurrent_dropout, rnn_type=None, bidirectional=False)[source]
+

Bases: object

+

Configuration for Recurrent Neural Network layer.

+
+
Parameters:
+
    +
  • units (int) – Number of RNN units.

  • +
  • return_sequences (bool) – Whether to return the last output or the full sequence.

  • +
  • go_backwards (bool) – If True, process the input sequence backwards.

  • +
  • dropout (float) – Fraction of the units to drop for the linear transformation of the inputs.

  • +
  • recurrent_dropout (float) – Fraction of the units to drop for the linear transformation of the recurrent state.

  • +
  • rnn_type (str, optional) – Type of RNN (e.g., ‘simple’, ‘lstm’, ‘gru’).

  • +
  • bidirectional (bool, optional) – If True, create a bidirectional RNN.

  • +
+
+
+
+
+bidirectional: bool = False
+
+ +
+
+dropout: float
+
+ +
+
+go_backwards: bool
+
+ +
+
+recurrent_dropout: float
+
+ +
+
+return_sequences: bool
+
+ +
+
+rnn_type: str = None
+
+ +
+
+units: int
+
+ +
+ +
+
+class vgslify.core.config.ReshapeConfig(target_shape)[source]
+

Bases: object

+

Configuration for Reshape layer.

+
+
Parameters:
+

target_shape (tuple) – Target shape of the output.

+
+
+
+
+target_shape: tuple
+
+ +
+ +
+
+

vgslify.core.factory module

+
+
+class vgslify.core.factory.LayerFactory(input_shape=None, data_format='channels_last')[source]
+

Bases: ABC

+

Abstract base class for creating neural network layers from VGSL specifications.

+

This class defines the interface that must be implemented by concrete factories +for different frameworks (e.g., TensorFlow, PyTorch). It also provides common +methods for output shape calculations to be used by subclasses.

+
+
Parameters:
+
    +
  • input_shape (tuple of int, optional) – The initial input shape for the model.

  • +
  • data_format (str, default 'channels_last') – The data format for the input tensor. Either ‘channels_last’ or ‘channels_first’.

  • +
+
+
+
+
+layers
+

A list to store the created layers.

+
+
Type:
+

list

+
+
+
+ +
+
+data_format
+

The data format for the input tensor.

+
+
Type:
+

str

+
+
+
+ +
+
+shape
+

The current shape of the output tensor.

+
+
Type:
+

tuple of int

+
+
+
+ +
+
+_input_shape
+

The initial input shape for the model.

+
+
Type:
+

tuple of int

+
+
+
+ +

Notes

+

This is an abstract base class. Use a concrete implementation like +TensorFlowLayerFactory or PyTorchLayerFactory in your code.

+

This class uses a naming convention where public methods for creating layers +(e.g., conv2d) have corresponding private methods with an underscore prefix +(e.g., _conv2d) that handle the actual layer creation.

+

Examples

+
>>> # Assuming we have a TensorFlowLayerFactory implementation
+>>> factory = TensorFlowLayerFactory(input_shape=(224, 224, 3))
+>>> factory.conv2d('Cr3,3,32')
+>>> factory.pooling2d('Mp2,2,2,2')
+>>> factory.flatten('Flt')
+>>> factory.dense('Fs128')
+>>> model = factory.build('my_model')
+
+
+
+
+activation(spec)[source]
+

Create an Activation layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Activation layer.

+
+
Returns:
+

The created Activation layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(128,))
+>>> factory.activation('Ar')
+
+
+
+ +
+
+batchnorm(spec)[source]
+

Create a BatchNormalization layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the BatchNormalization layer.

+
+
Returns:
+

The created BatchNormalization layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 32))
+>>> factory.batchnorm('Bn')
+
+
+
+ +
+
+abstract build(name)[source]
+

Abstract method to build the final model using the created layers.

+
+
Parameters:
+

name (str) – The name of the model.

+
+
Returns:
+

The final built model.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1))
+>>> factory.conv2d('Cr3,3,32')
+>>> factory.flatten('Flt')
+>>> factory.dense('Fs10')
+>>> model = factory.build('my_model')
+
+
+
+ +
+
+conv2d(spec)[source]
+

Create a 2D Convolutional layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Conv2D layer.

+
+
Returns:
+

The created Conv2D layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1))
+>>> factory.conv2d('Cr3,3,32')
+
+
+
+ +
+
+dense(spec)[source]
+

Create a Dense (Fully Connected) layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Dense layer.

+
+
Returns:
+

The created Dense layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(7*7*32,))
+>>> factory.dense('Fs128')
+
+
+
+ +
+
+dropout(spec)[source]
+

Create a Dropout layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Dropout layer.

+
+
Returns:
+

The created Dropout layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(128,))
+>>> factory.dropout('D50')
+
+
+
+ +
+
+flatten(spec)[source]
+

Create a Flatten layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Flatten layer.

+
+
Returns:
+

The created Flatten layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(7, 7, 64))
+>>> factory.flatten('Flt')
+
+
+
+ +
+
+input(spec)[source]
+

Create an Input layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Input layer.

+
+
Returns:
+

The created Input layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory()
+>>> factory.input('1,28,28,1')
+
+
+
+ +
+
+pooling2d(spec)[source]
+

Create a 2D Pooling layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the Pooling2D layer.

+
+
Returns:
+

The created Pooling2D layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 32))
+>>> factory.pooling2d('Mp2,2,2,2')
+
+
+
+ +
+
+reshape(spec)[source]
+

Create a Reshape layer based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – VGSL specification string for the Reshape layer. Can be:
+- ‘Rc(2|3)’: Collapse spatial dimensions (height, width, and channels).
+- ‘R<x>,<y>,<z>’: Reshape to the specified target shape.

+
+
Returns:
+

The created Reshape layer.

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28, 1))
+>>> factory.reshape('Rc3')
+
+
+
+ +
+
+rnn(spec)[source]
+

Create an RNN layer (LSTM or GRU), either unidirectional or bidirectional, based on the VGSL specification string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for the RNN layer.

+
+
Returns:
+

The created RNN layer (either unidirectional or bidirectional).

+
+
Return type:
+

Any

+
+
+

Examples

+
>>> # Using a hypothetical concrete implementation
+>>> factory = SomeConcreteLayerFactory(input_shape=(28, 28))
+>>> factory.rnn('Ls128')  # Unidirectional LSTM
+>>> factory.rnn('Bl128')  # Bidirectional LSTM
+
+
+
+ +
+ +
+
+

vgslify.core.parser module

+
+
+vgslify.core.parser.parse_activation_spec(spec)[source]
+

Parses a VGSL specification string for an Activation layer and returns the activation function.

+
+
Parameters:
+

spec (str) – VGSL specification for the Activation layer. Expected format: A(s|t|r|l|m)
+- s: softmax
+- t: tanh
+- r: relu
+- l: linear
+- m: sigmoid

+
+
Returns:
+

The activation function name.

+
+
Return type:
+

str

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> activation = parse_activation_spec("Ar")
+>>> print(activation)
+relu
+
+
+
+ +
+
+vgslify.core.parser.parse_conv2d_spec(spec)[source]
+

Parses a VGSL specification string for a Conv2D layer and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the convolutional layer. Expected format:
+C(s|t|r|l|m)<x>,<y>,[<s_x>,<s_y>,]<d>
+- (s|t|r|l|m): Activation type.
+- <x>,<y>: Kernel size.
+- <s_x>,<s_y>: Optional strides (defaults to (1, 1) if not provided).
+- <d>: Number of filters (depth).

+
+
Returns:
+

Parsed configuration for the Conv2D layer.

+
+
Return type:
+

Conv2DConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> from vgslify.core.parser import parse_conv2d_spec
+>>> config = parse_conv2d_spec("Cr3,3,64")
+>>> print(config)
+Conv2DConfig(activation='relu', kernel_size=(3, 3), strides=(1, 1), filters=64)
+
+
+
+ +
+
+vgslify.core.parser.parse_dense_spec(spec)[source]
+

Parses a VGSL specification string for a Dense layer and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the dense layer. Expected format: F(s|t|r|l|m)<d>
+- (s|t|r|l|m): Non-linearity type. One of sigmoid, tanh, relu, linear, or softmax.
+- <d>: Number of outputs (units).

+
+
Returns:
+

Parsed configuration for the Dense layer.

+
+
Return type:
+

DenseConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_dense_spec("Fr64")
+>>> print(config)
+DenseConfig(activation='relu', units=64)
+
+
+
+ +
+
+vgslify.core.parser.parse_dropout_spec(spec)[source]
+

Parses a VGSL specification string for a Dropout layer and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the Dropout layer. Expected format: +D<rate> where <rate> is the dropout percentage (0-100).

+
+
Returns:
+

Parsed configuration for the Dropout layer.

+
+
Return type:
+

DropoutConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_dropout_spec("D50")
+>>> print(config)
+DropoutConfig(rate=0.5)
+
+
+
+ +
+
+vgslify.core.parser.parse_input_spec(spec)[source]
+

Parses a VGSL specification string for an Input layer and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the Input layer. Supported format:
+<batch_size>,<depth>,<height>,<width>,<channels> for 4D inputs,
+<batch_size>,<height>,<width>,<channels> for 3D inputs,
+<batch_size>,<height>,<width> for 2D inputs,
+<batch_size>,<width> for 1D inputs.

+
+
Returns:
+

Parsed configuration for the Input layer.

+
+
Return type:
+

InputConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_input_spec("None,224,224,3")
+>>> print(config)
+InputConfig(batch_size=None, depth=None, height=224, width=224, channels=3)
+
+
+
+ +
+
+vgslify.core.parser.parse_pooling2d_spec(spec)[source]
+

Parses a VGSL specification string for a Pooling2D layer and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the pooling layer. Expected format:
+Mp<x>,<y>[,<s_x>,<s_y>] or Ap<x>,<y>[,<s_x>,<s_y>]
+- <x>,<y>: Pool size.
+- <s_x>,<s_y>: Strides. If not specified, defaults to pool size.

+
+
Returns:
+

Parsed configuration for the Pooling2D layer.

+
+
Return type:
+

Pooling2DConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_pooling2d_spec("Mp2,2")
+>>> print(config)
+Pooling2DConfig(pool_type='max', pool_size=(2, 2), strides=(2, 2))
+>>> config = parse_pooling2d_spec("Mp2,2,1,1")
+>>> print(config)
+Pooling2DConfig(pool_type='max', pool_size=(2, 2), strides=(1, 1))
+
+
+
+ +
+
+vgslify.core.parser.parse_reshape_spec(spec)[source]
+

Parses a VGSL specification string for a Reshape layer and returns the target shape.

+
+
Parameters:
+

spec (str) – VGSL specification for the Reshape layer. Expected format: R<x>,<y>,<z>

+
+
Returns:
+

Parsed configuration for the Reshape layer.

+
+
Return type:
+

ReshapeConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_reshape_spec("R64,64,3")
+>>> print(config)
+ReshapeConfig(target_shape=(64, 64, 3))
+
+
+
+ +
+
+vgslify.core.parser.parse_rnn_spec(spec)[source]
+

Parses a VGSL specification string for an RNN layer (LSTM, GRU, Bidirectional) +and returns the parsed configuration.

+
+
Parameters:
+

spec (str) – VGSL specification for the RNN layer. Expected format:
+For LSTM/GRU: (L|G)(f|r)[s]<n>[,D<rate>,Rd<rate>]
+For Bidirectional: B(g|l)<n>[,D<rate>,Rd<rate>]

+
+
Returns:
+

Parsed configuration for the RNN layer.

+
+
Return type:
+

RNNConfig

+
+
Raises:
+

ValueError – If the provided VGSL spec string does not match the expected format.

+
+
+

Examples

+
>>> config = parse_rnn_spec("Lf64,D50,Rd25")
+>>> print(config)
+RNNConfig(units=64, return_sequences=True, go_backwards=False, dropout=0.5,
+          recurrent_dropout=0.25, rnn_type='lstm', bidirectional=False)
+
+
+
+ +
+
+vgslify.core.parser.parse_spec(model_spec)[source]
+

Parse the full model spec string into a list of individual layer specs.

+
+
Parameters:
+

model_spec (str) – The VGSL specification string defining the model architecture.

+
+
Returns:
+

A list of layer specification strings.

+
+
Return type:
+

list

+
+
+
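
Examples

+
>>> # A minimal sketch; it assumes layer specs in the model string are
+>>> # separated by whitespace.
+>>> from vgslify.core.parser import parse_spec
+>>> parse_spec("None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fs10")
+['None,28,28,1', 'Cr3,3,32', 'Mp2,2,2,2', 'Rc2', 'Fs10']
+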
+ +
+
+

vgslify.core.utils module

+
+
+vgslify.core.utils.get_activation_function(activation_char)[source]
+

Maps a VGSL activation character to the corresponding Keras activation function.

+
+
Parameters:
+

activation_char (str) – The character representing the activation function in the VGSL spec.

+
+
Returns:
+

The name of the Keras activation function.

+
+
Return type:
+

str

+
+
Raises:
+

ValueError – If the provided activation character is not recognized.

+
+
+

Examples

+
>>> activation = get_activation_function('r')
+>>> print(activation)
+relu
+
+
+
+ +
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.html b/source/vgslify.html new file mode 100644 index 0000000..9759d17 --- /dev/null +++ b/source/vgslify.html @@ -0,0 +1,344 @@ + + + + + + + + + vgslify package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify package

+
+

Subpackages

+
+ +
+
+
+

Submodules

+
+
+

vgslify.generator module

+
+
+class vgslify.generator.VGSLModelGenerator(backend='auto')[source]
+

Bases: object

+

VGSLModelGenerator constructs a neural network model based on a VGSL (Variable-size Graph +Specification Language) specification string. This class supports dynamic model generation +for different backends, with current support for TensorFlow and PyTorch.

+

The generator takes a VGSL specification string that defines the architecture of the neural +network, including the input layer, convolutional layers, pooling layers, RNN layers, dense +layers, and more. The class parses this string, constructs the layers in sequence, and builds +the final model.

+
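
A minimal usage sketch (the spec below is illustrative and assumes the TensorFlow backend is installed):

+
>>> from vgslify.generator import VGSLModelGenerator
+>>> vgsl_gn = VGSLModelGenerator(backend="tensorflow")
+>>> model = vgsl_gn.generate_model("None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fs10")
+>>> model.summary()
+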
+
+construct_layer(spec)[source]
+

Constructs a single layer using the layer factory based on the spec string.

+
+
Parameters:
+

spec (str) – The VGSL specification string for a layer.

+
+
Returns:
+

The constructed layer.

+
+
Return type:
+

Any

+
+
Raises:
+

ValueError – If the layer specification is unknown.

+
+
+
+ +
+
+generate_history(model_spec)[source]
+

Generate the history of layer specifications without building the full model.

+

This method parses the VGSL specification string, constructs each layer using +the layer factory, and stores them in a list, but does not chain them or connect +input/output layers.

+
+
Parameters:
+

model_spec (str) – The VGSL specification string defining the model architecture.

+
+
Returns:
+

A list of layers constructed from the specification string.

+
+
Return type:
+

list

+
+
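
A brief sketch (vgsl_gn is a VGSLModelGenerator instance; the printed layer reprs depend on the chosen backend):

+
>>> layers = vgsl_gn.generate_history("None,28,28,1 Cr3,3,32 Fs10")
+>>> for layer in layers:
+...     print(layer)
+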
+
+ +
+
+generate_model(model_spec, model_name='VGSL_Model')[source]
+

Build the model based on the VGSL spec string.

+

This method parses the VGSL specification string, creates each layer +using the layer factory, and constructs the model sequentially.

+
+
Parameters:
+

model_spec (str) – The VGSL specification string defining the model architecture.

+
+
Returns:
+

The built model using the specified backend.

+
+
Return type:
+

Any

+
+
+
+ +
+ +
+
+

vgslify.parser module

+
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.parser.html b/source/vgslify.parser.html new file mode 100644 index 0000000..f56dc2f --- /dev/null +++ b/source/vgslify.parser.html @@ -0,0 +1,140 @@ + + + + + + + + + vgslify.parser package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.parser package

+
+

Submodules

+
+
+

vgslify.parser.tf_parser module

+
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.parsers.html b/source/vgslify.parsers.html new file mode 100644 index 0000000..a5c983d --- /dev/null +++ b/source/vgslify.parsers.html @@ -0,0 +1,719 @@ + + + + + + + + + vgslify.parsers package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.parsers package

+
+

Submodules

+
+
+

vgslify.parsers.base_parser module

+
+
+class vgslify.parsers.base_parser.BaseModelParser[source]
+

Bases: ABC

+

Abstract base class for model parsers. +Provides common utility methods for parsing different frameworks and generating VGSL spec strings.

+
+
+generate_vgsl(configs)[source]
+

Convert a list of layer configuration dataclasses into a VGSL specification string.

+
+
Parameters:
+

configs (List[Union[Conv2DConfig, Pooling2DConfig, DenseConfig, RNNConfig, DropoutConfig, ReshapeConfig, InputConfig, ActivationConfig]]) – List of layer configurations.

+
+
Returns:
+

VGSL specification string.

+
+
Return type:
+

str

+
+
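
A sketch using a concrete subclass (the resulting 'Fr64' token is an assumption based on the dense spec format documented in vgslify.core.parser):

+
>>> from vgslify.parsers.tf_parser import TensorFlowModelParser
+>>> from vgslify.core.config import DenseConfig
+>>> parser = TensorFlowModelParser()
+>>> parser.generate_vgsl([DenseConfig(activation='relu', units=64)])
+'Fr64'
+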
+
+ +
+
+abstract parse_activation(layer)[source]
+

Parse the Activation layer into an ActivationConfig dataclass.

+
+
Return type:
+

ActivationConfig

+
+
+
+ +
+
+abstract parse_batchnorm(layer)[source]
+

Parse the BatchNorm layer into a VGSL spec string.

+
+
Return type:
+

str

+
+
+
+ +
+
+abstract parse_conv2d(layer)[source]
+

Parse the Conv2D layer into a Conv2DConfig dataclass.

+
+
Return type:
+

Conv2DConfig

+
+
+
+ +
+
+abstract parse_dense(layer)[source]
+

Parse the Dense layer into a DenseConfig dataclass.

+
+
Return type:
+

DenseConfig

+
+
+
+ +
+
+abstract parse_dropout(layer)[source]
+

Parse the Dropout layer into a DropoutConfig dataclass.

+
+
Return type:
+

DropoutConfig

+
+
+
+ +
+
+abstract parse_flatten(layer)[source]
+

Parse the Flatten layer into a VGSL spec string.

+
+
Return type:
+

str

+
+
+
+ +
+
+abstract parse_input(layer)[source]
+

Parse the input layer into an InputConfig dataclass.

+
+
Return type:
+

InputConfig

+
+
+
+ +
+
+abstract parse_model(model)[source]
+

Parse the model into a VGSL spec string.

+
+
Return type:
+

str

+
+
+
+ +
+
+abstract parse_pooling(layer)[source]
+

Parse the Pooling layer into a Pooling2DConfig dataclass.

+
+
Return type:
+

Pooling2DConfig

+
+
+
+ +
+
+abstract parse_reshape(layer)[source]
+

Parse the Reshape layer into a ReshapeConfig dataclass.

+
+
Return type:
+

ReshapeConfig

+
+
+
+ +
+
+abstract parse_rnn(layer)[source]
+

Parse the RNN layer into an RNNConfig dataclass.

+
+
Return type:
+

RNNConfig

+
+
+
+ +
+ +
+
+

vgslify.parsers.tf_parser module

+
+
+class vgslify.parsers.tf_parser.TensorFlowModelParser[source]
+

Bases: BaseModelParser

+

Parser for converting TensorFlow Keras models into VGSL (Variable-size Graph Specification Language) spec strings.

+

This class extends the BaseModelParser to provide specific functionality for TensorFlow Keras models. +It uses configuration dataclasses to represent different layer types and converts them into +VGSL spec strings.

+
+
+layer_parsers
+

A dictionary mapping TensorFlow Keras layer types to their corresponding parsing methods.

+
+
Type:
+

Dict[Type[tf.keras.layers.Layer], Callable]

+
+
+
+ +

Notes

+

This parser supports a wide range of TensorFlow Keras layers and can be extended to support +additional layer types by adding new parsing methods and updating the layer_parsers dictionary.

+
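
A minimal usage sketch (model is assumed to be a constructed or trained tf.keras model):

+
>>> from vgslify.parsers.tf_parser import TensorFlowModelParser
+>>> parser = TensorFlowModelParser()
+>>> spec = parser.parse_model(model)
+>>> print(spec)
+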
+
+parse_activation(layer)[source]
+

Parse an Activation layer.

+
+
Parameters:
+

layer (tf.keras.layers.Activation) – The Activation layer to parse.

+
+
Returns:
+

The configuration for the Activation layer.

+
+
Return type:
+

ActivationConfig

+
+
+
+ +
+
+parse_batchnorm(layer)[source]
+

Parse a BatchNormalization layer. +Since BatchNormalization does not require a VGSL spec beyond ‘Bn’, return a placeholder.

+
+
Parameters:
+

layer (tf.keras.layers.BatchNormalization) – The BatchNormalization layer to parse.

+
+
Returns:
+

Indicates that the VGSL spec should include ‘Bn’.

+
+
Return type:
+

str

+
+
+
+ +
+
+parse_conv2d(layer)[source]
+

Parse a Conv2D layer into a Conv2DConfig dataclass.

+
+
Parameters:
+

layer (tf.keras.layers.Conv2D) – The Conv2D layer to parse.

+
+
Returns:
+

The configuration for the Conv2D layer.

+
+
Return type:
+

Conv2DConfig

+
+
+
+ +
+
+parse_dense(layer)[source]
+

Parse a Dense layer into a DenseConfig dataclass.

+
+
Parameters:
+

layer (tf.keras.layers.Dense) – The Dense layer to parse.

+
+
Returns:
+

The configuration for the Dense layer.

+
+
Return type:
+

DenseConfig

+
+
+
+ +
+
+parse_dropout(layer)[source]
+

Parse a Dropout layer into a DropoutConfig dataclass.

+
+
Parameters:
+

layer (tf.keras.layers.Dropout) – The Dropout layer to parse.

+
+
Returns:
+

The configuration for the Dropout layer.

+
+
Return type:
+

DropoutConfig

+
+
+
+ +
+
+parse_flatten(layer)[source]
+

Parse a Flatten layer. +Since Flatten does not require a VGSL spec beyond ‘Flatten’, return a placeholder.

+
+
Parameters:
+

layer (tf.keras.layers.Flatten) – The Flatten layer to parse.

+
+
Returns:
+

Indicates that the VGSL spec should include ‘Flatten’.

+
+
Return type:
+

str

+
+
+
+ +
+
+parse_input(layer)[source]
+

Parse an InputLayer into an InputConfig dataclass.

+
+
Parameters:
+

layer (tf.keras.layers.InputLayer) – The InputLayer to parse.

+
+
Returns:
+

The configuration for the input layer.

+
+
Return type:
+

InputConfig

+
+
+
+ +
+
+parse_model(model)[source]
+

Parse a TensorFlow Keras model into a VGSL spec string.

+
+
Parameters:
+

model (tf.keras.models.Model) – Keras model to be converted.

+
+
Returns:
+

VGSL spec string.

+
+
Return type:
+

str

+
+
Raises:
+

ValueError – If the model contains unsupported layers or if the input shape is invalid.

+
+
+
+ +
+
+parse_pooling(layer, pool_type)[source]
+

Parse a Pooling layer into a Pooling2DConfig dataclass.

+
+
Parameters:
+
    +
  • layer (tf.keras.layers.MaxPooling2D or tf.keras.layers.AveragePooling2D) – The Pooling layer to parse.

  • +
  • pool_type (str) – Type of pooling (‘max’ or ‘average’).

  • +
+
+
Returns:
+

The configuration for the Pooling layer.

+
+
Return type:
+

Pooling2DConfig

+
+
+
+ +
+
+parse_reshape(layer)[source]
+

Parse a Reshape layer into a ReshapeConfig dataclass.

+
+
Parameters:
+

layer (tf.keras.layers.Reshape) – The Reshape layer to parse.

+
+
Returns:
+

The configuration for the Reshape layer.

+
+
Return type:
+

ReshapeConfig

+
+
+
+ +
+
+parse_rnn(layer)[source]
+

Parse an RNN layer (LSTM, GRU, or Bidirectional) into an RNNConfig dataclass.

+
+
Parameters:
+

layer (Union[tf.keras.layers.LSTM, tf.keras.layers.GRU, tf.keras.layers.Bidirectional]) – The RNN layer to parse.

+
+
Returns:
+

The configuration for the RNN layer.

+
+
Return type:
+

RNNConfig

+
+
+
+ +
+ +
+
+

vgslify.parsers.torch_parser module

+
+
+class vgslify.parsers.torch_parser.TorchModelParser[source]
+

Bases: BaseModelParser

+

Parser for converting PyTorch models into VGSL (Variable-size Graph Specification Language) spec strings.

+

This class extends the BaseModelParser to provide specific functionality for PyTorch models. +It uses configuration dataclasses to represent different layer types and converts them into +VGSL spec strings.

+
+
+layer_parsers
+

A dictionary mapping PyTorch layer types to their corresponding parsing methods.

+
+
Type:
+

Dict[Type[nn.Module], Callable]

+
+
+
+ +

Notes

+

This parser supports a wide range of PyTorch layers and can be extended to support +additional layer types by adding new parsing methods and updating the layer_parsers dictionary.

+
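
A minimal usage sketch (model is assumed to be a torch.nn.Module):

+
>>> from vgslify.parsers.torch_parser import TorchModelParser
+>>> parser = TorchModelParser()
+>>> spec = parser.parse_model(model)
+>>> print(spec)
+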
+
+parse_activation(layer)[source]
+

Parse an activation function.

+
+
Parameters:
+

layer (nn.Module) – The activation layer to parse.

+
+
Returns:
+

The configuration for the Activation layer.

+
+
Return type:
+

ActivationConfig

+
+
+
+ +
+
+parse_batchnorm(layer)[source]
+

Parse a BatchNorm2d layer.

+
+
Parameters:
+

layer (nn.BatchNorm2d) – The BatchNorm2d layer to parse.

+
+
Returns:
+

Indicates that the VGSL spec should include ‘Bn’.

+
+
Return type:
+

str

+
+
+
+ +
+
+parse_conv2d(layer)[source]
+

Parse a Conv2d layer into a Conv2DConfig dataclass.

+
+
Parameters:
+

layer (nn.Conv2d) – The Conv2d layer to parse.

+
+
Returns:
+

The configuration for the Conv2D layer.

+
+
Return type:
+

Conv2DConfig

+
+
+
+ +
+
+parse_dense(layer)[source]
+

Parse a Linear layer into a DenseConfig dataclass.

+
+
Parameters:
+

layer (nn.Linear) – The Linear layer to parse.

+
+
Returns:
+

The configuration for the Dense layer.

+
+
Return type:
+

DenseConfig

+
+
+
+ +
+
+parse_dropout(layer)[source]
+

Parse a Dropout layer into a DropoutConfig dataclass.

+
+
Parameters:
+

layer (nn.Dropout) – The Dropout layer to parse.

+
+
Returns:
+

The configuration for the Dropout layer.

+
+
Return type:
+

DropoutConfig

+
+
+
+ +
+
+parse_flatten(layer)[source]
+

Parse a Flatten layer.

+
+
Parameters:
+

layer (nn.Flatten) – The Flatten layer to parse.

+
+
Returns:
+

Indicates that the VGSL spec should include ‘Flatten’.

+
+
Return type:
+

str

+
+
+
+ +
+
+parse_input(layer)[source]
+

Parse the input shape from the first layer of the model.

+
+
Parameters:
+

layer (nn.Module) – The first layer of the PyTorch model.

+
+
Returns:
+

The configuration for the input layer.

+
+
Return type:
+

InputConfig

+
+
Raises:
+

ValueError – If the input shape cannot be determined.

+
+
+
+ +
+
+parse_model(model)[source]
+

Parse a PyTorch model into a VGSL spec string.

+
+
Parameters:
+

model (nn.Module) – PyTorch model to be converted.

+
+
Returns:
+

VGSL spec string.

+
+
Return type:
+

str

+
+
Raises:
+

ValueError – If the model contains unsupported layers or if the input shape is invalid.

+
+
+
+ +
+
+parse_pooling(layer)[source]
+

Parse a Pooling layer into a Pooling2DConfig dataclass.

+
+
Parameters:
+

layer (nn.MaxPool2d or nn.AvgPool2d) – The Pooling layer to parse.

+
+
Returns:
+

The configuration for the Pooling layer.

+
+
Return type:
+

Pooling2DConfig

+
+
+
+ +
+
+parse_reshape(layer)[source]
+

Parse a Reshape layer into a ReshapeConfig dataclass.

+
+
Parameters:
+

layer (Reshape) – The custom Reshape layer to parse.

+
+
Returns:
+

The configuration for the Reshape layer.

+
+
Return type:
+

ReshapeConfig

+
+
+
+ +
+
+parse_rnn(layer)[source]
+

Parse an RNN layer (LSTM or GRU) into an RNNConfig dataclass.

+
+
Parameters:
+

layer (Union[nn.LSTM, nn.GRU]) – The RNN layer to parse.

+
+
Returns:
+

The configuration for the RNN layer.

+
+
Return type:
+

RNNConfig

+
+
+
+ +
+ +
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.tensorflow.html b/source/vgslify.tensorflow.html new file mode 100644 index 0000000..93eba56 --- /dev/null +++ b/source/vgslify.tensorflow.html @@ -0,0 +1,232 @@ + + + + + + + + + vgslify.tensorflow package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.tensorflow package

+
+

Submodules

+
+
+

vgslify.tensorflow.layers module

+
+
+class vgslify.tensorflow.layers.TensorFlowLayerFactory(input_shape=None)[source]
+

Bases: LayerFactory

+

TensorFlowLayerFactory is responsible for creating TensorFlow-specific layers based on parsed +VGSL (Variable-size Graph Specification Language) specifications.

+

This factory handles the creation of various types of layers, including convolutional layers, +pooling layers, RNN layers, dense layers, activation layers, and more.

+
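
A minimal sketch of direct factory use, mirroring the base LayerFactory example (the spec tokens follow the formats documented in vgslify.core.parser):

+
>>> from vgslify.tensorflow.layers import TensorFlowLayerFactory
+>>> factory = TensorFlowLayerFactory(input_shape=(28, 28, 1))
+>>> factory.conv2d('Cr3,3,32')
+>>> factory.flatten('Flt')
+>>> factory.dense('Fs10')
+>>> model = factory.build('my_model')
+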
+
+layers
+

A list of TensorFlow layers that have been added to the factory.

+
+
Type:
+

list

+
+
+
+ +
+
+shape
+

The current shape of the tensor, excluding the batch size.

+
+
Type:
+

tuple of int

+
+
+
+ +
+
+_input_shape
+

The original input shape provided during initialization.

+
+
Type:
+

tuple of int or None

+
+
+
+ +
+
+build(name='VGSL_Model')[source]
+

Build the final model using the accumulated layers.

+
+
Parameters:
+

name (str, optional) – The name of the model, by default “VGSL_Model”

+
+
Returns:
+

The constructed TensorFlow model.

+
+
Return type:
+

tf.keras.models.Model

+
+
Raises:
+
    +
  • ValueError – If no layers have been added to the model.

  • +
  • ValueError – If no input shape has been specified for the model.

  • +
+
+
+
+ +
+ +
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.torch.html b/source/vgslify.torch.html new file mode 100644 index 0000000..f7e2896 --- /dev/null +++ b/source/vgslify.torch.html @@ -0,0 +1,213 @@ + + + + + + + + + vgslify.torch package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.torch package

+
+

Submodules

+
+
+

vgslify.torch.layers module

+
+
+class vgslify.torch.layers.TorchLayerFactory(input_shape=None)[source]
+

Bases: LayerFactory

+

TorchLayerFactory is responsible for creating PyTorch-specific layers based on parsed +VGSL (Variable-size Graph Specification Language) specifications.

+

This factory handles the creation of various types of layers, including convolutional layers, +pooling layers, RNN layers, dense layers, activation layers, and more.

+
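
A minimal sketch of direct factory use, mirroring the base LayerFactory example (the input shape convention here is an assumption; adjust it to your data format):

+
>>> from vgslify.torch.layers import TorchLayerFactory
+>>> factory = TorchLayerFactory(input_shape=(28, 28, 1))
+>>> factory.conv2d('Cr3,3,32')
+>>> factory.flatten('Flt')
+>>> factory.dense('Fs10')
+>>> model = factory.build('my_model')
+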
+
+layers
+

A list of PyTorch layers that have been added to the factory.

+
+
Type:
+

list

+
+
+
+ +
+
+shape
+

The current shape of the tensor, excluding the batch size.

+
+
Type:
+

tuple of int

+
+
+
+ +
+
+_input_shape
+

The original input shape provided during initialization.

+
+
Type:
+

tuple of int or None

+
+
+
+ +
+
+build(name='VGSL_Model')[source]
+

Build the final model using the accumulated layers.

+
+
Parameters:
+

name (str, optional) – The name of the model, by default “VGSL_Model”

+
+
Returns:
+

The constructed PyTorch model.

+
+
Return type:
+

torch.nn.Module

+
+
Raises:
+
    +
  • ValueError – If no layers have been added to the model.

  • +
  • ValueError – If no input shape has been specified for the model.

  • +
+
+
+
+ +
+ +
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/source/vgslify.utils.html b/source/vgslify.utils.html new file mode 100644 index 0000000..b87dbd1 --- /dev/null +++ b/source/vgslify.utils.html @@ -0,0 +1,172 @@ + + + + + + + + + vgslify.utils package — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

vgslify.utils package

+
+

Submodules

+
+
+

vgslify.utils.model_to_spec module

+
+
+vgslify.utils.model_to_spec.model_to_spec(model)[source]
+

Convert a deep learning model (TensorFlow or PyTorch) to a VGSL spec string.

+
+
Parameters:
+

model (Model) – The deep learning model to be converted. Can be a TensorFlow model (tf.keras.models.Model) +or a PyTorch model (torch.nn.Module).

+
+
Returns:
+

VGSL spec string.

+
+
Return type:
+

str

+
+
Raises:
+

ValueError – If the model is not supported or cannot be parsed.

+
+
+

Examples

+
>>> from vgslify.utils import model_to_spec
+>>> import tensorflow as tf
+>>> model = tf.keras.models.load_model("path_to_model.h5")
+>>> spec_string = model_to_spec(model)
+>>> print(spec_string)
+
+
+
+ +
+
+

Module contents

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/supported_layers.html b/supported_layers.html new file mode 100644 index 0000000..b480d78 --- /dev/null +++ b/supported_layers.html @@ -0,0 +1,287 @@ + + + + + + + + + Supported Layers — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Supported Layers

+

VGSLify supports a range of layers that can be specified using the VGSL format. Each layer type has its own configuration format, allowing you to define models concisely and flexibly. This section provides an overview of the supported layers and their VGSL specifications.

+
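
As a quick sketch, the individual layer specs below are combined into a single space-separated model spec and passed to the generator:

+
from vgslify.generator import VGSLModelGenerator
+
+# Illustrative spec: input, conv, max-pool, collapse, dense, dropout, softmax output
+vgsl_spec = "None,28,28,1 Cr3,3,32 Mp2,2,2,2 Rc2 Fr64 D20 Fs10"
+model = VGSLModelGenerator(backend="tensorflow").generate_model(vgsl_spec)
+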
+

Layer Specifications

+
+

Input Layer

+
    +
  • VGSL Spec: <batch_size>[,<depth>],<height>,<width>[,<channels>]

  • +
  • Description: Defines the input shape for the model, where the first value is the batch size (set to None for variable), followed optionally by the depth (for 4D inputs), then the height, width, and optionally the channels.

  • +
  • Example: None,28,28,1

    +
      +
    • Defines an input layer with variable batch size, height and width of 28, and 1 channel (e.g., for grayscale images).

    • +
    +
  • +
+
+
+

Conv2D Layer

+
    +
  • VGSL Spec: C(s|t|r|l|m)<x>,<y>,[<s_x>,<s_y>,]<d>

  • +
  • Description: Defines a 2D convolutional layer with a kernel size of <x> by <y>, optional strides <s_x>,<s_y>, and <d> filters. Activation functions are specified as follows:

    +
      +
    • s: Sigmoid

    • +
    • t: Tanh

    • +
    • r: ReLU

    • +
    • l: Linear

    • +
    • m: Softmax

    • +
    +
  • +
  • Example: Cr3,3,32

    +
      +
    • Adds a convolutional layer with ReLU activation, a 3x3 kernel, default strides (1,1), and 32 filters.

    • +
    +
  • +
+
+
+

Pooling2D Layer

+
    +
  • VGSL Spec: Mp<x>,<y>[,<s_x>,<s_y>] or Ap<x>,<y>[,<s_x>,<s_y>]

    +
      +
    • Mp for max-pooling, Ap for average pooling.

    • +
    +
  • +
  • Description: Specifies a pooling operation, which reduces the spatial dimensions by applying a window of <x> by <y> and strides of <s_x>,<s_y>. If strides are not specified, they default to the pool size.

  • +
  • Example: Mp2,2,1,1

    +
      +
    • Defines a max-pooling layer with a pool size of 2x2 and strides of 1x1.

    • +
    +
  • +
+
+
+

Dense (Fully Connected) Layer

+
    +
  • VGSL Spec: F(s|t|r|l|m)<d>

  • +
  • Description: Defines a fully connected (dense) layer with <d> units. The non-linearity can be:

    +
      +
    • s: Sigmoid

    • +
    • t: Tanh

    • +
    • r: ReLU

    • +
    • l: Linear

    • +
    • m: Softmax

    • +
    +
  • +
  • Example: Fr64

    +
      +
    • Adds a dense layer with 64 units and ReLU activation.

    • +
    +
  • +
+
+
+

RNN Layer (LSTM/GRU/Bidirectional)

+
    +
  • VGSL Spec: (L|G)(f|r)[s]<n>[,D<rate>,Rd<rate>] for LSTM/GRU, B(g|l)<n>[,D<rate>,Rd<rate>] for Bidirectional RNN

  • +
  • Description: Specifies an RNN layer with n units. The optional dropout D and recurrent dropout Rd rates can be included.

    +
      +
    • L: LSTM

    • +
    • G: GRU

    • +
    • B: Bidirectional

    • +
    • f: Forward direction, r: Reverse direction, g: GRU, l: LSTM

    • +
    +
  • +
  • Example: Lf64,D50,Rd25

    +
      +
    • Defines an LSTM layer with 64 units, 50% dropout, and 25% recurrent dropout.

    • +
    +
  • +
+
+
+

Dropout Layer

+
    +
  • VGSL Spec: D<rate>

  • +
  • Description: Specifies a dropout layer, where <rate> is the dropout percentage (0–100).

  • +
  • Example: D50

    +
      +
    • Adds a dropout layer with a 50% dropout rate.

    • +
    +
  • +
+
+
+

Output Layer

+
    +
  • VGSL Spec: O(2|1|0)(l|s)<n>

  • +
  • Description: Defines the output layer. The first value (2, 1, or 0) specifies whether the output is 2D, 1D, or scalar, followed by the activation type (l: linear, s: softmax), and the number of output units (n).

  • +
  • Example: O1s10

    +
      +
    • Defines a softmax output layer with 10 classes for a 1D sequence.

    • +
    +
  • +
+
+
+

Reshape Layer

+
    +
  • VGSL Spec: Rc2, Rc3, or R<x>,<y>,<z>

  • +
  • Description: The Reshape layer reshapes the output tensor from the previous layer. It has two primary functions:

    +
      +
    • Rc2: Collapses the spatial dimensions (height, width, and channels) into a 2D tensor. This is typically used when transitioning to a fully connected (dense) layer.

      +
        +
      • Example: Reshaping from (batch_size, height, width, channels) to (batch_size, height * width * channels).

      • +
      +
    • +
    • Rc3: Collapses the spatial dimensions into a 3D tensor suitable for RNN layers. This creates a 3D tensor in the form of (batch_size, time_steps, features).

      +
        +
      • Example: Reshaping from (batch_size, height, width, channels) to (batch_size, height * width, channels) for input to LSTM or GRU layers.

      • +
      +
    • +
    • R<x>,<y>,<z>: Directly reshapes to the specified target shape.

    • +
    +
  • +
  • Example:

    +
      +
    • Rc2 collapses the output from (None, 8, 8, 64) to (None, 4096) for a fully connected layer.

    • +
    • Rc3 collapses the output from (None, 8, 8, 64) to (None, 64, 64) for input to an RNN layer.

    • +
    • R64,64,3 reshapes the output to (None, 64, 64, 3).

    • +
    +
  • +
+
+
+
+

More Examples

+

Explore additional examples and advanced configurations in the tutorials.

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/tutorials.html b/tutorials.html new file mode 100644 index 0000000..0c76515 --- /dev/null +++ b/tutorials.html @@ -0,0 +1,297 @@ + + + + + + + + + Tutorials — VGSLify 0.13.0 documentation + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Tutorials

+

This section provides hands-on tutorials for using VGSLify to build and train deep learning models. Follow these step-by-step guides to get familiar with how VGSLify simplifies model creation through VGSL specifications.

+
+

Tutorial 1: Building a CNN for Image Classification

+
+

Overview

+

In this tutorial, you will build a Convolutional Neural Network (CNN) for image classification using the CIFAR-10 dataset. We will define the model using a VGSL spec string, which allows us to specify the architecture in a concise, human-readable format. By the end of this tutorial, you will have a fully trained CNN model for image classification.

+
+
+

Step-by-Step Instructions

  1. Import required libraries:

     Begin by importing the necessary libraries for TensorFlow and VGSLify.

     import tensorflow as tf
     from vgslify.generator import VGSLModelGenerator

  2. Load and preprocess the dataset:

     CIFAR-10 is a dataset of 60,000 32x32 color images in 10 classes, with 6,000 images per class. You can load and preprocess the dataset as follows:

     (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()

     # Normalize the images to the range [0, 1]
     x_train, x_test = x_train / 255.0, x_test / 255.0

     # Convert labels to one-hot encoding
     y_train = tf.keras.utils.to_categorical(y_train, 10)
     y_test = tf.keras.utils.to_categorical(y_test, 10)

  3. Define the VGSL spec string for the CNN:

     The VGSL spec string defines the layers of the CNN. Here’s a simple CNN architecture:

     vgsl_spec = "None,32,32,3 Cr3,3,32 Mp2,2,2,2 Cr3,3,64 Mp2,2,2,2 Rc2 Fc128 D25 Fs10"

     Explanation:

     • None,32,32,3: Input layer for images of size 32x32 with 3 color channels (RGB).

     • Cr3,3,32: Convolutional layer with a 3x3 filter, ReLU activation, and 32 filters.

     • Mp2,2,2,2: MaxPooling layer with a 2x2 pool size and 2x2 strides.

     • Cr3,3,64: Second convolutional layer with 64 filters.

     • Rc2: Reshape layer that flattens the output for the fully connected layer.

     • Fc128: Fully connected (dense) layer with 128 units.

     • D25: Dropout layer with a 25% dropout rate.

     • Fs10: Output layer with 10 units and softmax activation for classification into 10 classes.

  4. Build and compile the model:

     Use VGSLify to build and compile the model. This step generates the CNN architecture based on the VGSL string and compiles it for training.

     vgsl_gn = VGSLModelGenerator(backend="tensorflow")
     model = vgsl_gn.generate_model(vgsl_spec)

     model.compile(optimizer='adam',
                   loss='categorical_crossentropy',
                   metrics=['accuracy'])

  5. Train the model:

     Now, train the CNN on the CIFAR-10 training set. You can adjust the batch size and number of epochs as needed.

     history = model.fit(x_train, y_train, epochs=10, batch_size=64, validation_data=(x_test, y_test))

  6. Evaluate the model performance:

     After training, evaluate the model on the test set to see how well it performs.

     test_loss, test_acc = model.evaluate(x_test, y_test)
     print(f'Test accuracy: {test_acc}')

     You can also plot the training history to visualize how the accuracy and loss evolve over time, as shown below.
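
     A minimal plotting sketch (assuming matplotlib is installed; the history keys below are the standard Keras names for the accuracy metric compiled above):

     import matplotlib.pyplot as plt

     # Plot training vs. validation accuracy across the epochs.
     plt.plot(history.history['accuracy'], label='train accuracy')
     plt.plot(history.history['val_accuracy'], label='validation accuracy')
     plt.xlabel('Epoch')
     plt.ylabel('Accuracy')
     plt.legend()
     plt.show()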

Tutorial 2: Creating an LSTM for Sequence Prediction


Overview


In this tutorial, you will build an LSTM (Long Short-Term Memory) model using VGSLify to predict the next value in a sequence. This is commonly used in time-series forecasting. We will generate synthetic data, define an LSTM model using a VGSL string, and train the model to predict future values in the sequence.


Step-by-Step Instructions

  1. Import necessary libraries:

     import numpy as np
     from vgslify.generator import VGSLModelGenerator

  2. Generate synthetic sequence data:

     For this example, let’s generate a sine wave as our synthetic sequence data. The LSTM will learn to predict the next value in this sequence.

     def generate_sine_wave(seq_length=1000):
         x = np.arange(seq_length)
         y = np.sin(x / 20.0)
         return y

     sine_wave = generate_sine_wave()

     # Prepare the data for LSTM input
     def create_sequences(data, seq_length):
         x = []
         y = []
         for i in range(len(data) - seq_length):
             x.append(data[i:i+seq_length])
             y.append(data[i+seq_length])
         return np.array(x), np.array(y)

     seq_length = 50
     x_train, y_train = create_sequences(sine_wave, seq_length)

     # LSTM expects input of shape (batch, time steps, features)
     x_train = np.expand_dims(x_train, axis=-1)
     y_train = np.expand_dims(y_train, axis=-1)

  3. Define the VGSL spec string for the LSTM model:

     Here’s the VGSL string to define an LSTM with 50 units, followed by dropout and an output layer:

     vgsl_spec = f"None,{seq_length},{x_train.shape[2]} Lf50 D20 Fl1"

     Explanation:

     • None,seq_length,x_train.shape[2]: Input shape with a sequence length of 50 and 1 feature per time step.

     • Lf50: LSTM with 50 units, without returning sequences.

     • D20: Dropout layer with a 20% dropout rate.

     • Fl1: Output layer with 1 unit and linear activation for sequence prediction.

  4. Build and compile the model:

     vgsl_gn = VGSLModelGenerator(backend="tensorflow")
     model = vgsl_gn.generate_model(vgsl_spec)

     model.compile(optimizer='adam',
                   loss='mean_squared_error')

  5. Train the model:

     Train the model to predict the next value in the sine wave sequence.

     history = model.fit(x_train, y_train, epochs=20, batch_size=64)

  6. Evaluate the model:

     Once training is complete, evaluate the model by plotting the true vs. predicted values of the sine wave sequence.

     import matplotlib.pyplot as plt

     y_pred = model.predict(x_train)

     plt.plot(y_train, label='True')
     plt.plot(y_pred, label='Predicted')
     plt.legend()
     plt.show()

     To forecast beyond the training data, see the sketch after this list.
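
A hypothetical extension (not part of the original tutorial; the variable names are illustrative): roll the trained model forward one step at a time, feeding each prediction back in as the newest input value.

    # Start from the last known window of the sine wave.
    window = sine_wave[-seq_length:].copy()
    forecast = []
    for _ in range(100):
        x = window.reshape(1, seq_length, 1)          # (batch, time steps, features)
        next_val = model.predict(x, verbose=0)[0, 0]  # predict one step ahead
        forecast.append(next_val)
        window = np.append(window[1:], next_val)      # slide the window forward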