MilesCranmer committed
Commit 90f5e4c
1 Parent(s): b6c4c3a

Enable custom loss functions

Files changed (2):
  1. example.py +3 -1
  2. pysr/sr.py +36 -7
example.py CHANGED
@@ -10,7 +10,9 @@ equations = pysr(X, y, niterations=5,
     binary_operators=["plus", "mult"],
     unary_operators=[
       "cos", "exp", "sin", #Pre-defined library of operators (see https://pysr.readthedocs.io/en/latest/docs/operators/)
-      "inv(x) = 1/x"]) # Define your own operator! (Julia syntax)
+      "inv(x) = 1/x"],
+    loss='L1DistLoss()',
+    julia_project="../SymbolicRegression.jl") # Define your own operator! (Julia syntax)
 
 ...# (you can use ctl-c to exit early)
 
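Put together, the updated example.py corresponds to a call like the one below. This is a minimal runnable sketch, not part of the commit: the toy dataset is assumed, and the relative julia_project path only works if it points at a local SymbolicRegression.jl checkout.

import numpy as np
from pysr import pysr

# Assumed toy dataset for illustration
X = np.random.randn(100, 5)
y = 2 * np.cos(X[:, 3]) + X[:, 0] ** 2 - 2

equations = pysr(X, y, niterations=5,
                 binary_operators=["plus", "mult"],
                 unary_operators=[
                     "cos", "exp", "sin",  # pre-defined operators
                     "inv(x) = 1/x"],      # custom operator (Julia syntax)
                 loss='L1DistLoss()',      # new in this commit
                 julia_project="../SymbolicRegression.jl")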
pysr/sr.py CHANGED
@@ -57,12 +57,13 @@ sympy_mappings = {
 }
 
 def pysr(X=None, y=None, weights=None,
+        binary_operators=["plus", "mult"],
+        unary_operators=["cos", "exp", "sin"],
         procs=4,
+        loss='L2DistLoss()',
         populations=None,
         niterations=100,
         ncyclesperiteration=300,
-        binary_operators=["plus", "mult"],
-        unary_operators=["cos", "exp", "sin"],
         alpha=0.1,
         annealing=True,
         fractionReplaced=0.10,
@@ -116,16 +117,42 @@ def pysr(X=None, y=None, weights=None,
     :param y: np.ndarray, 1D array. Rows are examples.
     :param weights: np.ndarray, 1D array. Each row is how to weight the
         mean-square-error loss on weights.
+    :param binary_operators: list, List of strings giving the binary operators
+        in Julia's Base.
+    :param unary_operators: list, Same but for operators taking a single scalar.
     :param procs: int, Number of processes (=number of populations running).
+    :param loss: str, String of Julia code specifying the loss function.
+        Can either be a loss from LossFunctions.jl, or your own
+        loss written as a function. Examples of custom written losses
+        include: `myloss(x, y) = abs(x-y)` for non-weighted, or
+        `myloss(x, y, w) = w*abs(x-y)` for weighted.
+        Among the included losses, these are:
+        Regression:
+            - `LPDistLoss{P}()`,
+            - `L1DistLoss()`,
+            - `L2DistLoss()` (mean square),
+            - `LogitDistLoss()`,
+            - `HuberLoss(d)`,
+            - `L1EpsilonInsLoss(ϵ)`,
+            - `L2EpsilonInsLoss(ϵ)`,
+            - `PeriodicLoss(c)`,
+            - `QuantileLoss(τ)`.
+        Classification:
+            - `ZeroOneLoss()`,
+            - `PerceptronLoss()`,
+            - `L1HingeLoss()`,
+            - `SmoothedL1HingeLoss(γ)`,
+            - `ModifiedHuberLoss()`,
+            - `L2MarginLoss()`,
+            - `ExpLoss()`,
+            - `SigmoidLoss()`,
+            - `DWDMarginLoss(q)`.
     :param populations: int, Number of populations running; by default=procs.
     :param niterations: int, Number of iterations of the algorithm to run. The best
         equations are printed, and migrate between populations, at the
         end of each.
     :param ncyclesperiteration: int, Number of total mutations to run, per 10
         samples of the population, per iteration.
-    :param binary_operators: list, List of strings giving the binary operators
-        in Julia's Base, or in `operator.jl`.
-    :param unary_operators: list, Same but for operators taking a single `Float32`.
     :param alpha: float, Initial temperature.
     :param annealing: bool, Whether to use annealing. You should (and it is default).
     :param fractionReplaced: float, How much of population to replace with migrating
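Since the loss string is handed to Julia verbatim (see the `loss = {loss}` line in the last hunk below), both a LossFunctions.jl constructor and a hand-written Julia function are valid values. A hedged sketch of both forms follows; the toy data is assumed, `myloss` mirrors the docstring's own example, and pairing the three-argument form with the `weights` argument is my reading of the docstring rather than something the diff states explicitly.

import numpy as np
from pysr import pysr

X = np.random.randn(100, 2)
y = X[:, 0] + 0.1 * np.random.randn(100)
w = np.ones_like(y)  # per-row weights

# A built-in loss from LossFunctions.jl (Huber loss with d = 0.5):
equations = pysr(X, y, niterations=5, loss='HuberLoss(0.5)')

# A custom weighted loss, written in Julia syntax as in the docstring:
equations = pysr(X, y, weights=w, niterations=5,
                 loss='myloss(x, y, w) = w * abs(x - y)')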
@@ -262,7 +289,7 @@ def pysr(X=None, y=None, weights=None,
             weightSimplify=weightSimplify,
             constraints=constraints,
             extra_sympy_mappings=extra_sympy_mappings,
-            julia_project=julia_project)
+            julia_project=julia_project, loss=loss)
 
     kwargs = {**_set_paths(tempdir), **kwargs}
 
@@ -383,7 +410,7 @@ def _make_hyperparams_julia_str(X, alpha, annealing, batchSize, batching, binary
         parsimony, perturbationFactor, populations, procs, shouldOptimizeConstants,
         unary_operators, useFrequency, use_custom_variable_names,
         variable_names, warmupMaxsize, weightAddNode,
-        ncyclesperiteration, fractionReplaced, topn, verbosity,
+        ncyclesperiteration, fractionReplaced, topn, verbosity, loss,
         weightDeleteNode, weightDoNothing, weightInsertNode, weightMutateConstant,
         weightMutateOperator, weightRandomize, weightSimplify, weights, **kwargs):
     def tuple_fix(ops):
@@ -411,11 +438,13 @@ greater=SymbolicRegression.greater
 relu=SymbolicRegression.relu
 logical_or=SymbolicRegression.logical_or
 logical_and=SymbolicRegression.logical_and
+loss = {loss}
 
 options = SymbolicRegression.Options(binary_operators={'(' + tuple_fix(binary_operators) + ')'},
                                      unary_operators={'(' + tuple_fix(unary_operators) + ')'},
                                      {constraints_str}
                                      parsimony={parsimony:f}f0,
+                                     loss=loss,
                                      alpha={alpha:f}f0,
                                      maxsize={maxsize:d},
                                      maxdepth={maxdepth:d},
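Because _make_hyperparams_julia_str builds the Julia source with a Python f-string, `loss = {loss}` splices the user's string in unmodified; that is what makes both `L1DistLoss()` constructors and full function definitions legal values. A simplified sketch of the rendering (not the exact template from sr.py):

loss = "myloss(x, y, w) = w * abs(x - y)"  # value of the `loss` kwarg

julia_str = f"""
loss = {loss}

options = SymbolicRegression.Options(binary_operators=(plus, mult),
                                     loss=loss)
"""
print(julia_str)  # prints Julia code that defines myloss and passes it to Options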