Examples
Compute a distance between two graphs.
See Distance between two graphs.
<<<
import copy
from mlstatpy.graph import GraphDistance
# We define two graphs as list of edges.
graph1 = [("a", "b"), ("b", "c"), ("b", "X"), ("X", "c"),
          ("c", "d"), ("d", "e"), ("0", "b")]
graph2 = [("a", "b"), ("b", "c"), ("b", "X"), ("X", "c"),
          ("c", "t"), ("t", "d"), ("d", "e"), ("d", "g")]
# We convert them into objects GraphDistance.
graph1 = GraphDistance(graph1)
graph2 = GraphDistance(graph2)
distance, graph = graph1.distance_matching_graphs_paths(graph2, use_min=False)
print("distance", distance)
print("common paths:", graph)
>>>
distance 0.3318250377073907
common paths: 0
X
a
b
c
d
e
00
11
g
t
a -> b []
b -> c []
b -> X []
X -> c []
c -> d []
d -> e []
0 -> b []
00 -> a []
00 -> 0 []
e -> 11 []
c -> 2a.t []
2a.t -> d []
d -> 2a.g []
2a.g -> 11 []
(entrée originale : graph_distance.py:docstring of mlstatpy.graph.graph_distance.GraphDistance, line 3)
Stochastic Gradient Descent applied to linear regression
The following example shows how to optimize a simple linear regression.
<<<
import numpy
from mlstatpy.optim import SGDOptimizer
def fct_loss(c, X, y):
    return numpy.linalg.norm(X @ c - y) ** 2

def fct_grad(c, x, y, i=0):
    return x * (x @ c - y) * 0.1
coef = numpy.array([0.5, 0.6, -0.7])
X = numpy.random.randn(10, 3)
y = X @ coef
sgd = SGDOptimizer(numpy.random.randn(3))
sgd.train(X, y, fct_loss, fct_grad, max_iter=15, verbose=True)
print('optimized coefficients:', sgd.coef)
>>>
0/15: loss: 88.79 lr=0.1 max(coef): 1.9 l1=0/2.9 l2=0/4.5
1/15: loss: 47.22 lr=0.0302 max(coef): 1.7 l1=0.38/2.4 l2=0.084/3.2
2/15: loss: 39.7 lr=0.0218 max(coef): 1.4 l1=1.8/3 l2=2.5/3.3
3/15: loss: 22.29 lr=0.018 max(coef): 0.92 l1=0.0063/2.5 l2=2.3e-05/2.1
4/15: loss: 10.18 lr=0.0156 max(coef): 0.85 l1=0.01/1.9 l2=3.5e-05/1.4
5/15: loss: 4.194 lr=0.014 max(coef): 0.76 l1=0.00065/1.6 l2=2.4e-07/1.1
6/15: loss: 1.646 lr=0.0128 max(coef): 0.71 l1=0.065/1.8 l2=0.0018/1.1
7/15: loss: 0.7677 lr=0.0119 max(coef): 0.66 l1=0.13/1.8 l2=0.0076/1.1
8/15: loss: 0.4433 lr=0.0111 max(coef): 0.63 l1=0.095/1.8 l2=0.0042/1.1
9/15: loss: 0.272 lr=0.0105 max(coef): 0.6 l1=0.00051/1.8 l2=8.8e-08/1.1
10/15: loss: 0.1774 lr=0.00995 max(coef): 0.6 l1=0.00076/1.8 l2=2e-07/1
11/15: loss: 0.1309 lr=0.00949 max(coef): 0.61 l1=0.001/1.8 l2=3.4e-07/1
12/15: loss: 0.1065 lr=0.00909 max(coef): 0.61 l1=0.071/1.7 l2=0.0042/1
13/15: loss: 0.08566 lr=0.00874 max(coef): 0.62 l1=0.0081/1.7 l2=3.2e-05/1
14/15: loss: 0.07239 lr=0.00842 max(coef): 0.62 l1=0.06/1.7 l2=0.003/1
15/15: loss: 0.06085 lr=0.00814 max(coef): 0.63 l1=0.056/1.7 l2=0.0025/1
optimized coefficients: [ 0.541 0.577 -0.629]
(entrée originale : sgd.py:docstring of mlstatpy.optim.sgd.SGDOptimizer, line 34)