Skip to content

hetero_nn_param

hetero_nn_param

Classes

SelectorParam

Parameters used for the back-propagation sample selector of Hetero Neural Network.

Parameters:

Name Type Description Default
method None

None or str back propagation select method, accept "relative" only, default: None

None
selective_size None

int deque size to use, store the most recent selective_size historical loss, default: 1024

1024
beta None

int sample whose selective probability >= power(np.random, beta) will be selected

1
min_prob None

Numeric selective probability is max(min_prob, rank_rate)

0
Source code in federatedml/param/hetero_nn_param.py
class SelectorParam(object):
    """
    Parameters used for selective back propagation in Hetero Neural Network.

    Args:
        method: None or str
            back propagation select method, accept "relative" only, default: None
        selective_size: int
            deque size to use, store the most recent selective_size historical loss, default: 1024
        beta: int
            sample whose selective probability >= power(np.random, beta) will be selected
        min_prob: Numeric
            selective probability is max(min_prob, rank_rate)
        random_state: int or None
            random seed stored for the selector; presumably used for reproducible sampling — TODO confirm

    """
    def __init__(self, method=None, beta=1, selective_size=consts.SELECTIVE_SIZE, min_prob=0, random_state=None):
        self.method = method
        self.selective_size = selective_size
        self.beta = beta
        self.min_prob = min_prob
        self.random_state = random_state

    def check(self):
        """Validate selector parameters; raise ValueError on the first invalid one."""
        if self.method is not None and self.method not in ["relative"]:
            # fixed message: previously read 'should be None be "relative"'
            raise ValueError('selective method should be None or "relative"')

        if not isinstance(self.selective_size, int) or self.selective_size <= 0:
            raise ValueError("selective size should be a positive integer")

        # NOTE: bool is a subclass of int, so beta=True passes; kept for
        # backward compatibility with the original behavior.
        if not isinstance(self.beta, int):
            raise ValueError("beta should be integer")

        if not isinstance(self.min_prob, (float, int)):
            raise ValueError("min_prob should be numeric")
__init__(self, method=None, beta=1, selective_size=1024, min_prob=0, random_state=None) special
Source code in federatedml/param/hetero_nn_param.py
def __init__(self, method=None, beta=1, selective_size=consts.SELECTIVE_SIZE, min_prob=0, random_state=None):
    """Store selector configuration; all validation is deferred to check()."""
    # selection strategy: "relative" enables the selector, None disables it
    self.method = method
    # exponent applied when drawing the random selection threshold
    self.beta = beta
    # how many recent historical losses are retained for ranking
    self.selective_size = selective_size
    # lower bound on a sample's selection probability
    self.min_prob = min_prob
    # random state kept on the instance; presumably seeds the sampling — TODO confirm
    self.random_state = random_state
check(self)
Source code in federatedml/param/hetero_nn_param.py
def check(self):
    """Validate selector parameters, raising ValueError on the first violation."""
    if self.method is not None and self.method not in ["relative"]:
        # fixed message: previously read 'should be None be "relative"'
        raise ValueError('selective method should be None or "relative"')

    if not isinstance(self.selective_size, int) or self.selective_size <= 0:
        raise ValueError("selective size should be a positive integer")

    # NOTE: bool passes this check since bool subclasses int (original behavior kept)
    if not isinstance(self.beta, int):
        raise ValueError("beta should be integer")

    if not isinstance(self.min_prob, (float, int)):
        raise ValueError("min_prob should be numeric")

HeteroNNParam (BaseParam)

Parameters used for Hetero Neural Network.

Parameters:

Name Type Description Default
task_type None

str, task type of hetero nn model, one of 'classification', 'regression'.

'classification'
config_type None

str, accept "keras" only.

'keras'
bottom_nn_define None

a dict represents the structure of bottom neural network.

None
interactive_layer_define None

a dict represents the structure of interactive layer.

None
interactive_layer_lr None

float, the learning rate of interactive layer.

0.9
top_nn_define None

a dict represents the structure of top neural network.

None
optimizer None

optimizer method, accept following types: 1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD" 2. a dict, with a required key-value pair keyed by "optimizer", with optional key-value pairs such as learning rate. defaults to "SGD"

'SGD'
loss None

str, a string to define loss function used

None
epochs None

int, the maximum iteration for aggregation in training.

100
batch_size None

int, batch size when updating model. -1 means use all data in a batch. i.e. Not to use mini-batch strategy. defaults to -1.

required
early_stop None

str, accept 'diff' only in this version, default: 'diff' Method used to judge converge or not. a) diff: Use difference of loss between two iterations to judge whether converge.

required
floating_point_precision None

None or integer, if not None, means use floating_point_precision-bit to speed up calculation, e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide the result by 2**floating_point_precision in the end.

23
drop_out_keep_rate None

float, should be between 0 and 1; if not equal to 1.0, drop out will be enabled

1.0
callback_param None

CallbackParam object

<federatedml.param.callback_param.CallbackParam object at 0x7f3f8a7f5250>
Source code in federatedml/param/hetero_nn_param.py
class HeteroNNParam(BaseParam):
    """
    Parameters used for Hetero Neural Network.

    Args:
        task_type: str, task type of hetero nn model, one of 'classification', 'regression'.
        config_type: str, accept "keras" only.
        bottom_nn_define: a dict represents the structure of bottom neural network.
        interactive_layer_define: a dict represents the structure of interactive layer.
        interactive_layer_lr: float, the learning rate of interactive layer.
        top_nn_define: a dict represents the structure of top neural network.
        optimizer: optimizer method, accept following types:
            1. a string, one of "Adadelta", "Adagrad", "Adam", "Adamax", "Nadam", "RMSprop", "SGD"
            2. a dict, with a required key-value pair keyed by "optimizer",
                with optional key-value pairs such as learning rate.
            defaults to "SGD"
        loss:  str, a string to define loss function used
        epochs: int, the maximum iteration for aggregation in training.
        batch_size : int, batch size when updating model.
            -1 means use all data in a batch. i.e. Not to use mini-batch strategy.
            defaults to -1.
        early_stop : str, accept 'diff' only in this version, default: 'diff'
            Method used to judge converge or not.
                a)	diff: Use difference of loss between two iterations to judge whether converge.
        tol: float, tolerance used together with early_stop.
        encrypt_param: EncryptParam object.
        encrypted_mode_calculator_param: EncryptedModeCalculatorParam object.
        predict_param: PredictParam object.
        cv_param: CrossValidationParam object.
        validation_freqs: deprecated, use callback_param's validation_freqs instead.
        early_stopping_rounds: deprecated, use callback_param's early_stopping_rounds instead.
        metrics: deprecated, use callback_param's metrics instead.
        use_first_metric_only: deprecated, use callback_param's use_first_metric_only instead.
        selector_param: SelectorParam object, back-propagation sample selector settings.
        floating_point_precision: None or integer, if not None, means use floating_point_precision-bit to speed up calculation,
                                   e.g.: convert an x to round(x * 2**floating_point_precision) during Paillier operation, divide
                                          the result by 2**floating_point_precision in the end.
        drop_out_keep_rate: float, should be between 0 and 1; if not equal to 1.0, drop out will be enabled.
        callback_param: CallbackParam object
    """

    def __init__(self,
                 task_type='classification',
                 config_type="keras",
                 bottom_nn_define=None,
                 top_nn_define=None,
                 interactive_layer_define=None,
                 interactive_layer_lr=0.9,
                 optimizer='SGD',
                 loss=None,
                 epochs=100,
                 batch_size=-1,
                 early_stop="diff",
                 tol=1e-5,
                 encrypt_param=EncryptParam(),
                 encrypted_mode_calculator_param=EncryptedModeCalculatorParam(mode="confusion_opt"),
                 predict_param=PredictParam(),
                 cv_param=CrossValidationParam(),
                 validation_freqs=None,
                 early_stopping_rounds=None,
                 metrics=None,
                 use_first_metric_only=True,
                 selector_param=SelectorParam(),
                 floating_point_precision=23,
                 drop_out_keep_rate=1.0,
                 callback_param=CallbackParam()):
        super(HeteroNNParam, self).__init__()

        self.task_type = task_type
        self.config_type = config_type
        self.bottom_nn_define = bottom_nn_define
        self.interactive_layer_define = interactive_layer_define
        self.interactive_layer_lr = interactive_layer_lr
        self.top_nn_define = top_nn_define
        self.batch_size = batch_size
        self.epochs = epochs
        self.early_stop = early_stop
        self.tol = tol
        self.optimizer = optimizer
        self.loss = loss
        self.validation_freqs = validation_freqs
        self.early_stopping_rounds = early_stopping_rounds
        self.metrics = metrics or []
        self.use_first_metric_only = use_first_metric_only

        # deepcopy every param-object argument: the defaults above are evaluated
        # once at class-definition time, so without copying all instances would
        # share (and could mutate) the same objects.
        self.encrypt_param = copy.deepcopy(encrypt_param)
        self.encrypted_model_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
        self.predict_param = copy.deepcopy(predict_param)
        self.cv_param = copy.deepcopy(cv_param)

        self.selector_param = copy.deepcopy(selector_param)
        self.floating_point_precision = floating_point_precision

        self.drop_out_keep_rate = drop_out_keep_rate

        self.callback_param = copy.deepcopy(callback_param)

    def check(self):
        """Validate all parameters and migrate deprecated ones into callback_param.

        Raises:
            ValueError: on the first parameter that fails validation.
        """
        # normalize the optimizer config (str or dict) into a SimpleNamespace
        self.optimizer = self._parse_optimizer(self.optimizer)
        supported_config_type = ["keras"]

        if self.task_type not in ["classification", "regression"]:
            # fixed message: previously said "config_type" although task_type is checked
            raise ValueError("task_type should be classification or regression")

        if self.config_type not in supported_config_type:
            raise ValueError(f"config_type should be one of {supported_config_type}")

        if not isinstance(self.tol, (int, float)):
            raise ValueError("tol should be numeric")

        if not isinstance(self.epochs, int) or self.epochs <= 0:
            raise ValueError("epochs should be a positive integer")

        if self.bottom_nn_define and not isinstance(self.bottom_nn_define, dict):
            raise ValueError("bottom_nn_define should be a dict defining the structure of neural network")

        if self.top_nn_define and not isinstance(self.top_nn_define, dict):
            raise ValueError("top_nn_define should be a dict defining the structure of neural network")

        if self.interactive_layer_define is not None and not isinstance(self.interactive_layer_define, dict):
            raise ValueError(
                "the interactive_layer_define should be a dict defining the structure of interactive layer")

        if self.batch_size != -1:
            if not isinstance(self.batch_size, int) \
                    or self.batch_size < consts.MIN_BATCH_SIZE:
                raise ValueError(
                    " {} not supported, should be larger than 10 or -1 represent for all data".format(self.batch_size))

        if self.early_stop != "diff":
            raise ValueError("early stop should be diff in this version")

        if self.metrics is not None and not isinstance(self.metrics, list):
            raise ValueError("metrics should be a list")

        if self.floating_point_precision is not None and \
                (not isinstance(self.floating_point_precision, int) or
                 self.floating_point_precision < 0 or self.floating_point_precision > 63):
            raise ValueError("floating point precision should be null or a integer between 0 and 63")

        if not isinstance(self.drop_out_keep_rate, (float, int)) or self.drop_out_keep_rate < 0.0 or \
                self.drop_out_keep_rate > 1.0:
            raise ValueError("drop_out_keep_rate should be in range [0.0, 1.0]")

        self.encrypt_param.check()
        self.encrypted_model_calculator_param.check()
        self.predict_param.check()
        self.selector_param.check()

        descr = "hetero nn param's "

        # If any deprecated top-level param was user-fed, switch the callback
        # machinery on (unless callback_param was also user-fed, which conflicts).
        for p in ["early_stopping_rounds", "validation_freqs",
                  "use_first_metric_only"]:
            if self._deprecated_params_set.get(p):
                if "callback_param" in self.get_user_feeded():
                    raise ValueError(f"{p} and callback param should not be set simultaneously,"
                                     f"{self._deprecated_params_set}, {self.get_user_feeded()}")
                else:
                    self.callback_param.callbacks = ["PerformanceEvaluate"]
                break

        if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"):
            self.callback_param.validation_freqs = self.validation_freqs

        if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"):
            self.callback_param.early_stopping_rounds = self.early_stopping_rounds

        if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"):
            if self.metrics:
                self.callback_param.metrics = self.metrics

        if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"):
            self.callback_param.use_first_metric_only = self.use_first_metric_only

    @staticmethod
    def _parse_optimizer(opt):
        """Normalize an optimizer config into a SimpleNamespace(optimizer=..., kwargs=...).

        Examples:

            1. "optimize": "SGD"
            2. "optimize": {
                "optimizer": "SGD",
                "learning_rate": 0.05
            }
        """

        kwargs = {}
        if isinstance(opt, str):
            return SimpleNamespace(optimizer=opt, kwargs=kwargs)
        elif isinstance(opt, dict):
            # previously used `kwargs` ({}) as the .get default; None is clearer
            # and behaves identically under the falsiness check below
            optimizer = opt.get("optimizer")
            if not optimizer:
                raise ValueError(f"optimizer config: {opt} invalid")
            kwargs = {k: v for k, v in opt.items() if k != "optimizer"}
            return SimpleNamespace(optimizer=optimizer, kwargs=kwargs)
        elif opt is None:
            return None
        else:
            raise ValueError(f"invalid type for optimize: {type(opt)}")
__init__(self, task_type='classification', config_type='keras', bottom_nn_define=None, top_nn_define=None, interactive_layer_define=None, interactive_layer_lr=0.9, optimizer='SGD', loss=None, epochs=100, batch_size=-1, early_stop='diff', tol=1e-05, encrypt_param=<federatedml.param.encrypt_param.EncryptParam object at 0x7f3f8a7f50d0>, encrypted_mode_calculator_param=<federatedml.param.encrypted_mode_calculation_param.EncryptedModeCalculatorParam object at 0x7f3f8a7f5150>, predict_param=<federatedml.param.predict_param.PredictParam object at 0x7f3f8a7f5350>, cv_param=<federatedml.param.cross_validation_param.CrossValidationParam object at 0x7f3f8a7f5110>, validation_freqs=None, early_stopping_rounds=None, metrics=None, use_first_metric_only=True, selector_param=<federatedml.param.hetero_nn_param.SelectorParam object at 0x7f3f8a7f52d0>, floating_point_precision=23, drop_out_keep_rate=1.0, callback_param=<federatedml.param.callback_param.CallbackParam object at 0x7f3f8a7f5250>) special
Source code in federatedml/param/hetero_nn_param.py
def __init__(self,
             task_type='classification',
             config_type="keras",
             bottom_nn_define=None,
             top_nn_define=None,
             interactive_layer_define=None,
             interactive_layer_lr=0.9,
             optimizer='SGD',
             loss=None,
             epochs=100,
             batch_size=-1,
             early_stop="diff",
             tol=1e-5,
             encrypt_param=EncryptParam(),
             encrypted_mode_calculator_param=EncryptedModeCalculatorParam(mode="confusion_opt"),
             predict_param=PredictParam(),
             cv_param=CrossValidationParam(),
             validation_freqs=None,
             early_stopping_rounds=None,
             metrics=None,
             use_first_metric_only=True,
             selector_param=SelectorParam(),
             floating_point_precision=23,
             drop_out_keep_rate=1.0,
             callback_param=CallbackParam()):
    """Store hetero-NN configuration; validation is deferred to check()."""
    super(HeteroNNParam, self).__init__()

    self.task_type = task_type
    self.config_type = config_type
    self.bottom_nn_define = bottom_nn_define
    self.interactive_layer_define = interactive_layer_define
    self.interactive_layer_lr = interactive_layer_lr
    self.top_nn_define = top_nn_define
    self.batch_size = batch_size
    self.epochs = epochs
    self.early_stop = early_stop
    self.tol = tol
    self.optimizer = optimizer
    self.loss = loss
    self.validation_freqs = validation_freqs
    self.early_stopping_rounds = early_stopping_rounds
    self.metrics = metrics or []
    self.use_first_metric_only = use_first_metric_only

    # deepcopy every param-object argument: the defaults above are evaluated
    # once at function-definition time, so without copying all instances would
    # share (and could mutate) the same objects.
    self.encrypt_param = copy.deepcopy(encrypt_param)
    self.encrypted_model_calculator_param = copy.deepcopy(encrypted_mode_calculator_param)
    self.predict_param = copy.deepcopy(predict_param)
    self.cv_param = copy.deepcopy(cv_param)

    self.selector_param = copy.deepcopy(selector_param)
    self.floating_point_precision = floating_point_precision

    self.drop_out_keep_rate = drop_out_keep_rate

    self.callback_param = copy.deepcopy(callback_param)
check(self)
Source code in federatedml/param/hetero_nn_param.py
def check(self):
    """Validate hetero-NN parameters and migrate deprecated ones into callback_param.

    Raises:
        ValueError: on the first parameter that fails validation.
    """
    # normalize the optimizer config (str or dict) into a SimpleNamespace
    self.optimizer = self._parse_optimizer(self.optimizer)
    supported_config_type = ["keras"]

    if self.task_type not in ["classification", "regression"]:
        # fixed message: previously said "config_type" although task_type is checked
        raise ValueError("task_type should be classification or regression")

    if self.config_type not in supported_config_type:
        raise ValueError(f"config_type should be one of {supported_config_type}")

    if not isinstance(self.tol, (int, float)):
        raise ValueError("tol should be numeric")

    if not isinstance(self.epochs, int) or self.epochs <= 0:
        raise ValueError("epochs should be a positive integer")

    if self.bottom_nn_define and not isinstance(self.bottom_nn_define, dict):
        raise ValueError("bottom_nn_define should be a dict defining the structure of neural network")

    if self.top_nn_define and not isinstance(self.top_nn_define, dict):
        raise ValueError("top_nn_define should be a dict defining the structure of neural network")

    if self.interactive_layer_define is not None and not isinstance(self.interactive_layer_define, dict):
        raise ValueError(
            "the interactive_layer_define should be a dict defining the structure of interactive layer")

    # -1 is a sentinel meaning "full batch"; otherwise enforce the minimum size
    if self.batch_size != -1:
        if not isinstance(self.batch_size, int) \
                or self.batch_size < consts.MIN_BATCH_SIZE:
            raise ValueError(
                " {} not supported, should be larger than 10 or -1 represent for all data".format(self.batch_size))

    if self.early_stop != "diff":
        raise ValueError("early stop should be diff in this version")

    if self.metrics is not None and not isinstance(self.metrics, list):
        raise ValueError("metrics should be a list")

    if self.floating_point_precision is not None and \
            (not isinstance(self.floating_point_precision, int) or
             self.floating_point_precision < 0 or self.floating_point_precision > 63):
        raise ValueError("floating point precision should be null or a integer between 0 and 63")

    if not isinstance(self.drop_out_keep_rate, (float, int)) or self.drop_out_keep_rate < 0.0 or \
            self.drop_out_keep_rate > 1.0:
        raise ValueError("drop_out_keep_rate should be in range [0.0, 1.0]")

    # delegate to nested param objects
    self.encrypt_param.check()
    self.encrypted_model_calculator_param.check()
    self.predict_param.check()
    self.selector_param.check()

    descr = "hetero nn param's "

    # If any deprecated top-level param was user-fed, switch the callback
    # machinery on (unless callback_param was also user-fed, which conflicts).
    for p in ["early_stopping_rounds", "validation_freqs",
              "use_first_metric_only"]:
        if self._deprecated_params_set.get(p):
            if "callback_param" in self.get_user_feeded():
                raise ValueError(f"{p} and callback param should not be set simultaneously,"
                                 f"{self._deprecated_params_set}, {self.get_user_feeded()}")
            else:
                self.callback_param.callbacks = ["PerformanceEvaluate"]
            break

    if self._warn_to_deprecate_param("validation_freqs", descr, "callback_param's 'validation_freqs'"):
        self.callback_param.validation_freqs = self.validation_freqs

    if self._warn_to_deprecate_param("early_stopping_rounds", descr, "callback_param's 'early_stopping_rounds'"):
        self.callback_param.early_stopping_rounds = self.early_stopping_rounds

    if self._warn_to_deprecate_param("metrics", descr, "callback_param's 'metrics'"):
        if self.metrics:
            self.callback_param.metrics = self.metrics

    if self._warn_to_deprecate_param("use_first_metric_only", descr, "callback_param's 'use_first_metric_only'"):
        self.callback_param.use_first_metric_only = self.use_first_metric_only

Last update: 2021-12-01
Back to top