config

class common.infer.config.InferenceSubtaskConfig(output_dir='${hydra:runtime.output_dir}', data_dir='${oc.env:AI_REPO_PATH}/data/', device='cpu', seed=0)

Bases: BaseSubtaskConfig
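
The defaults in the signature suggest a Hydra/OmegaConf-style structured config: output_dir and data_dir default to interpolations that are only resolved at run time (the Hydra run directory and the AI_REPO_PATH environment variable, respectively), while device and seed are plain values. The following is a minimal sketch of how such a dataclass config could be declared and instantiated; the body of BaseSubtaskConfig shown here is a hypothetical stand-in, since its fields are not documented on this page.

    from dataclasses import dataclass

    from omegaconf import OmegaConf


    @dataclass
    class BaseSubtaskConfig:
        """Hypothetical stand-in for the real base class; its fields are not shown here."""


    @dataclass
    class InferenceSubtaskConfig(BaseSubtaskConfig):
        """Mirrors the signature above; interpolated defaults resolve at run time."""
        output_dir: str = "${hydra:runtime.output_dir}"  # Hydra run directory (requires a Hydra app)
        data_dir: str = "${oc.env:AI_REPO_PATH}/data/"   # built from the AI_REPO_PATH env var
        device: str = "cpu"
        seed: int = 0


    # Build a structured config and override a field without resolving the interpolations.
    cfg = OmegaConf.structured(InferenceSubtaskConfig)
    cfg.device = "cuda:0"
    print(OmegaConf.to_yaml(cfg))  # interpolations are printed unresolved

Accessing data_dir with resolution enabled only succeeds when AI_REPO_PATH is set, and output_dir resolves only inside a running Hydra application; outside those contexts the values remain literal interpolation strings.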