# biome.text.modules.heads.task_head Module

# TaskOutput Class


class TaskOutput (
    logits: torch.Tensor,
    loss: Union[torch.Tensor, NoneType] = None,
    **extra_data,
)

Task output data class

A task output will almost always contain the logits and probs properties

# as_dict Method


def as_dict(self) -> Dict[str, torch.Tensor]

Dict representation of the task output
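A minimal sketch of building a TaskOutput and serializing it. The probs keyword is passed through the documented **extra_data argument; exactly which keys end up in as_dict is an assumption here, not something this page states:

```python
import torch

from biome.text.modules.heads.task_head import TaskOutput

# Hypothetical logits for a batch of 2 examples over 3 labels
logits = torch.randn(2, 3)

# Extra data such as probabilities can be passed as keyword arguments
output = TaskOutput(logits=logits, probs=torch.softmax(logits, dim=-1))

# as_dict gives a plain dict of tensors, convenient for logging or serialization
print(output.as_dict().keys())
```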

# TaskName Class


class TaskName (
    value,
    names=None,
    *,
    module=None,
    qualname=None,
    type=None,
    start=1,
)

The task name enum structure

# Ancestors

  • enum.Enum

# TaskHead Class


class TaskHead (backbone: ModelBackbone)

Base task head class

Initializes internal Module state, shared by both nn.Module and ScriptModule.

# Ancestors

  • torch.nn.modules.module.Module
  • allennlp.common.registrable.Registrable
  • allennlp.common.from_params.FromParams

# register Static method


def register (
  overrides: bool = False,
  **kwargs,
) 

Enables the task head component for pipeline loading
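A plausible registration sketch for a custom head. The subclass name MyCustomHead is hypothetical, and calling register on the subclass assumes the documented method is meant to be invoked from the class being registered:

```python
from biome.text.modules.heads.task_head import TaskHead, TaskOutput


class MyCustomHead(TaskHead):
    """Hypothetical task head, defined only to illustrate registration."""

    def forward(self, *args, **kwargs) -> TaskOutput:
        raise NotImplementedError  # sketch only


# Make the head available for pipeline loading by name;
# overrides=True would replace an already registered head with the same name.
MyCustomHead.register(overrides=True)
```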

# Instance variables

var labels : List[str]

The configured vocab labels

var num_labels

The number of vocab labels

# on_vocab_update Method


def on_vocab_update(self)

Actions to take when the vocab is updated. Rebuild here the modules whose initialization depends on some vocab metric (for example, the number of labels).

At this point, the model.vocab has already been updated, so it can be used for architecture updates
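A hedged example of what an override might look like: rebuilding an output projection whose size depends on the number of labels. The class name ResizableHead and the _output_layer attribute are illustrative, not part of the documented API:

```python
import torch

from biome.text.modules.heads.task_head import TaskHead


class ResizableHead(TaskHead):
    """Hypothetical head that rebuilds its output layer when the vocab changes."""

    def on_vocab_update(self):
        # self.num_labels already reflects the updated vocab at this point
        hidden_dim = self._output_layer.in_features  # illustrative attribute name
        self._output_layer = torch.nn.Linear(hidden_dim, self.num_labels)
```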

# extend_labels Method


def extend_labels (
  self,
  labels: List[str],
) 

Extends the label vocabulary with the given labels
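Illustrative use of extend_labels together with the labels and num_labels properties; the head variable is assumed to be an already constructed TaskHead instance:

```python
# `head` is assumed to be an already constructed TaskHead instance
head.extend_labels(["positive", "negative"])

print(head.labels)      # the configured vocab labels, now including the new ones
print(head.num_labels)  # the number of vocab labels
```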

# task_name Method


def task_name(self) -> TaskName

The task head name

# inputs Method


def inputs(self) -> Union[List[str], NoneType]

The expected input names for data featurization. If not defined, they will be automatically calculated from the featurize signature

# forward Method


def forward (
  self,
  *args: Any,
  **kwargs: Any,
)  -> TaskOutput

Defines the computation performed at every call.

Should be overridden by all subclasses.

Note

Although the recipe for the forward pass needs to be defined within this function, one should call the Module instance afterwards instead of this, since the former takes care of running the registered hooks while the latter silently ignores them.
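A minimal sketch of a subclass forward that honors the TaskOutput contract; the _projection layer is a hypothetical stand-in for whatever the concrete head computes:

```python
from typing import Optional

import torch

from biome.text.modules.heads.task_head import TaskHead, TaskOutput


class SketchHead(TaskHead):
    """Hypothetical head illustrating the forward contract only."""

    def forward(self, features: torch.Tensor, label: Optional[torch.Tensor] = None) -> TaskOutput:
        # self._projection is an illustrative layer, not part of the base class
        logits = self._projection(features)
        loss = None
        if label is not None:
            loss = torch.nn.functional.cross_entropy(logits, label)
        return TaskOutput(logits=logits, loss=loss)
```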

# get_metrics Method


def get_metrics (
  self,
  reset: bool = False,
)  -> Dict[str, float]

Metrics dictionary for the training task
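A sketch of a get_metrics override that exposes an accumulated AllenNLP metric; keeping a CategoricalAccuracy instance in self._accuracy is an assumption for illustration, not part of the base class:

```python
from typing import Dict

from allennlp.training.metrics import CategoricalAccuracy


class MetricsSketch:
    """Illustrative fragment; in practice this lives on a TaskHead subclass."""

    def __init__(self):
        self._accuracy = CategoricalAccuracy()

    def get_metrics(self, reset: bool = False) -> Dict[str, float]:
        # reset=True is typically passed at the end of an epoch
        return {"accuracy": self._accuracy.get_metric(reset)}
```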

# featurize Method


def featurize (
  self,
  *args,
  **kwargs,
)  -> Union[allennlp.data.instance.Instance, NoneType]

Converts incoming data into an AllenNLP Instance, used for PyTorch tensor generation
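A hedged example of a featurize implementation that builds an AllenNLP Instance from raw text and an optional label. The tokenizer, token indexer, and field names are illustrative; a real head would reuse whatever featurization the backbone provides:

```python
from typing import Dict, Optional

from allennlp.data import Instance
from allennlp.data.fields import Field, LabelField, TextField
from allennlp.data.token_indexers import SingleIdTokenIndexer
from allennlp.data.tokenizers import WhitespaceTokenizer


def featurize(self, text: str, label: Optional[str] = None) -> Optional[Instance]:
    # Illustrative tokenization; a real head would reuse the backbone's featurizer
    tokens = WhitespaceTokenizer().tokenize(text)
    fields: Dict[str, Field] = {
        "text": TextField(tokens, {"tokens": SingleIdTokenIndexer()})
    }
    if label is not None:
        fields["label"] = LabelField(label)
    return Instance(fields)
```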

# decode Method


def decode (
  self,
  output: TaskOutput,
)  -> TaskOutput

Completes the output for the prediction

The base implementation adds nothing.

Parameters

output
The output from the head's forward method

Returns

completed_output
The output, which the base implementation returns unchanged.
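A sketch of a decode override that completes the forward output with class probabilities derived from the logits. Assigning output.probs assumes TaskOutput allows attributes to be set after construction (it does accept arbitrary extra data at construction time):

```python
import torch

from biome.text.modules.heads.task_head import TaskOutput


def decode(self, output: TaskOutput) -> TaskOutput:
    # Complete the forward output with class probabilities for the prediction
    output.probs = torch.softmax(output.logits, dim=-1)
    return output
```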

# explain_prediction Method


def explain_prediction (
  self,
prediction: Dict[str, numpy.array],
  instance: allennlp.data.instance.Instance,
  n_steps: int,
)  -> Dict[str, Any]

Adds embedding explanation information to the prediction output

Parameters

prediction : Dict[str, numpy.array]
The prediction results for the input
instance : Instance
The featurized input instance
n_steps : int
The number of steps to find token level attributions

Returns

Prediction with explanation

# TaskHeadConfiguration Class


class TaskHeadConfiguration (*args, **kwds)

Layer spec for TaskHead components
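A hedged example of how a TaskHeadConfiguration might be declared when assembling a pipeline. The type and labels arguments are assumptions about what the *args/**kwds signature forwards to the chosen head; they are not documented on this page:

```python
from biome.text.modules.heads.task_head import TaskHeadConfiguration

# Hypothetical arguments: the configuration is a layer spec, so it forwards
# whatever parameters the chosen head expects
head_config = TaskHeadConfiguration(
    type="TextClassification",        # name of a registered TaskHead subclass (assumed)
    labels=["positive", "negative"],  # head-specific parameter (assumed)
)
```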

