This is the beginning of a story about how mathematics first invaded geology, how an IT specialist then came along and programmed everything, and how this gave rise to the new profession of the "digital geologist". It is a story about how stochastic modeling differs from kriging. It is also an attempt to show how you can write your first piece of geological software yourself and, perhaps, change the petroleum and engineering-geology industry along the way.
Let's calculate how much oil there is
Reserves are not something you can measure directly. The amount of oil in place is computed from a model: take the volume of reservoir rock, multiply it by porosity and by oil saturation, and you get the volume of oil. Every one of these quantities varies from point to point, and the only places where anything is actually measured are the wells, a handful of thin lines piercing an enormous volume of rock. Everything between the wells has to be estimated, and how exactly to do that is what this article is about.
Any measurement refers to some physical volume of rock: a core plug, a logging interval, a seismic cell. In geostatistics this volume is called the «support» of the measurement, and values defined on different supports cannot simply be mixed. The very name of the central method, kriging, comes from the South African mining engineer Danie Krige (Krige, D.G. 1951. A statistical approach to some basic mine valuation problems on the Witwatersrand. Journal of the Chemical, Metallurgical and Mining Society of South Africa, December 1951, pp. 119-139), who studied how gold grades measured on small samples (roughly 5×5 in size) relate to the average grades of mining blocks of roughly 50×50×1: the step from one support to a much larger one is what modelers now call upscaling.
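To get a feel for what a change of support does to the statistics, here is a tiny illustrative numpy experiment (the sample values and block sizes are invented for the illustration): averaging the same fine-scale values over larger and larger blocks leaves the mean in place but shrinks the spread.
import numpy as np
np.random.seed(0)
# an invented fine-scale property: 1000 small-sample values
fine = np.random.lognormal(mean=0.0, sigma=1.0, size=1000)
# "upscale" by averaging consecutive samples into ever larger blocks
for block in (1, 10, 100):
    coarse = fine.reshape(-1, block).mean(axis=1)
    print(f"block of {block:3d} samples: mean = {coarse.mean():.2f}, std = {coarse.std():.2f}")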
Back to the task. Suppose we have measured some property at a handful of points (in one dimension for now, simply so that the pictures stay readable) and we want a value everywhere in between. The most obvious recipe is inverse-distance interpolation: the estimate at a point $x$ is a weighted average of the measured values $z_i$ at locations $u_i$, with weights proportional to an inverse power of the distance,
$$\hat z(x) = \frac{\sum_i z_i \, |x - u_i|^{-m}}{\sum_i |x - u_i|^{-m}}.$$
The result for five random points is shown in fig. 1. Here is the Python code:
import numpy as np
import matplotlib.pyplot as pl
# num of data
N = 5
np.random.seed(0)
# source data
z = np.random.rand(N)
u = np.random.rand(N)
x = np.linspace(0, 1, 100)
y = np.zeros_like(x)
# norm weights
w = np.zeros_like(x)
# power
m = 2
# interpolation
for i in range(N):
    y += z[i] * 1 / np.abs(u[i] - x) ** m
    w += 1 / np.abs(u[i] - x) ** m
# normalization
y /= w
# add source data
x = np.concatenate((x, u))
y = np.concatenate((y, z))
order = x.argsort()
x = x[order]
y = y[order]
# draw graph
pl.figure()
pl.scatter(u, z)
pl.plot(x, y)
pl.show()
pl.close()
Figure 1. Inverse-distance interpolation of the source data.
The curve in fig. 1 does not look much like anything a geologist would draw: the data points sit on strange shelves and the behaviour between them is dictated by the formula, not by the rocks. The only knob to turn is the power m, and turning it changes the curve but not its artificial character (see fig. 2); a small sketch comparing several powers is given right after the figure. Clearly, we need a method that knows something about how the property actually varies in space.
Figure 2.
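As a quick experiment, one can rerun the same inverse-distance formula for a few values of the power m (the values 1, 2 and 4 below are arbitrary) and compare the curves on one plot:
import numpy as np
import matplotlib.pyplot as pl
np.random.seed(0)
# same five random points as in the script above
N = 5
z = np.random.rand(N)
u = np.random.rand(N)
x = np.linspace(0, 1, 100)
pl.figure()
for m in (1, 2, 4):
    # inverse-distance weights for every (data point, mesh node) pair
    w = 1 / np.abs(u[:, None] - x[None, :]) ** m
    pl.plot(x, (z @ w) / w.sum(axis=0), label=f"m = {m}")
pl.scatter(u, z, color='black', label='source data')
pl.legend()
pl.show()
pl.close()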
Geostatistics starts from a different premise: treat the unknown property as a random function $Z(x)$ and describe how strongly its values at two points are related by a covariance function $c(h) = \mathrm{Cov}\big(Z(x), Z(x+h)\big)$, which depends only on the separation $h$ and decays as the points move apart. The estimate at a point $x$ is again sought as a linear combination of the data,
$$\hat Z(x) = \sum_i \lambda_i(x)\, Z(u_i),$$
but now the weights are not prescribed by a formula; they are chosen so that the variance of the estimation error $\hat Z(x) - Z(x)$ is as small as possible. Setting the derivatives of that variance with respect to the weights to zero gives a system of linear equations in which only the covariance function appears,
$$\sum_j \lambda_j(x)\, c(u_i - u_j) = c(u_i - x), \qquad i = 1, \dots, N,$$
or, in matrix form, $C\,\lambda(x) = c(x)$ with $C_{ij} = c(u_i - u_j)$. Solving this system anew for every target point is wasteful. Substituting the solution $\lambda(x) = C^{-1} c(x)$ back into the estimate and regrouping the terms gives the equivalent dual form
$$\hat Z(x) = \sum_i b_i\, c(u_i - x), \qquad C\,b = z,$$
in which the coefficients $b$ are computed once from the data, and the interpolant is simply a sum of covariance functions centred at the data points. This is what is called dual kriging, and it is exactly what the code below implements.
The interpolation built this way from five random points is shown in fig. 3. Here is the Python code:
import numpy as np
import matplotlib.pyplot as pl
# num of data
N = 5
np.random.seed(0)
# source data
z = np.random.rand(N) - 0.5
u = np.random.rand(N)
x = np.linspace(0, 1, 100)
y = np.zeros_like(x)
# covariance function
def c(h):
    return np.exp(-np.abs(h ** 2 * 20.))
# covariance matrix
C = np.zeros((N, N))
for i in range(N):
    C[i, :] = c(u - u[i])
# dual kriging weights
lamda = np.linalg.solve(C, z)
# interpolation
for i in range(N):
    y += lamda[i] * c(u[i] - x)
# add source data
x = np.concatenate((x, u))
y = np.concatenate((y, z))
order = x.argsort()
x = x[order]
y = y[order]
# draw graph
pl.figure()
pl.scatter(u, z)
pl.plot(x, y)
pl.show()
pl.close()
Figure 3. Dual kriging interpolation of the source data.
Two properties of the result are worth noting. First, the kriging curve passes exactly through the data points: substituting $x = u_j$ into the dual form gives $\sum_i b_i\, c(u_i - u_j) = (C b)_j = z_j$. Second, far from the data the covariances $c(u_i - x)$ die out and the estimate relaxes to the mean (zero here), while the curve itself stays conspicuously smooth; both features will matter below.
And this is where the problem starts. Kriging gives a single, deterministic and deliberately smoothed answer, the best estimate in the least-squares sense, whereas real rocks are anything but smooth: the property keeps fluctuating between the wells, we simply do not know where exactly it goes up and where it goes down. For counting reserves, and even more for assessing how wrong we might be, we would rather have many equally probable versions of the property, each as variable as the real thing and each honouring the measured data. Building such versions is what stochastic modeling is about, and for that we will need two more ingredients:
from theory import probability
from numpy import linalg
That is, a little probability theory (the first import is a joke, there is no such module) and the linear algebra that ships with numpy.
Look at the values of the property at the nodes of our mesh not as unknown numbers but as a vector of correlated random variables. The most convenient model for such a vector is the multivariate normal distribution: it is completely defined by the vector of means (zero here) and by the covariance matrix $C$, whose entries are given by the same covariance function as before, $C_{ij} = c(x_i - x_j)$. Drawing a sample from this distribution takes two steps. First find any matrix $A$ such that $A A^{\mathrm T} = C$; one way is the eigendecomposition $C = V\,\mathrm{diag}(w)\,V^{\mathrm T}$, which gives $A = V\,\mathrm{diag}(\sqrt{w})$. Then take a vector $\zeta$ of independent standard normal values and set $Z = A\zeta$: its covariance is $\mathrm{Cov}(Z) = A\,\mathrm{Cov}(\zeta)\,A^{\mathrm T} = A A^{\mathrm T} = C$, exactly what we asked for. The code below generates one such unconditional realization; the result is shown in fig. 4.
import numpy as np
import matplotlib.pyplot as pl
np.random.seed(0)
# source data
N = 100
x = np.linspace(0, 1, 100)
# covariance function
def c(h):
    return np.exp(-np.abs(h ** 2 * 250))
# covariance matrix
C = np.zeros((N, N))
for i in range(N):
    C[i, :] = c(x - x[i])
# eigen decomposition
w, v = np.linalg.eig(C)
# guard against round-off: keep real parts, clip tiny negative eigenvalues to zero
w = np.real(w).clip(min=0)
A = np.real(v) @ np.diag(w ** 0.5)
# you can check that C is (approximately) equal to A @ A.T
# independent normal values
zeta = np.random.randn(N)
# dependent multinormal values
Z = A @ zeta
# draw graph
pl.figure()
pl.plot(x, Z)
pl.show()
pl.close()
Figure 4. An unconditional realization of the random function.
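The comment in the code above invites a check; here is a self-contained sketch that rebuilds the same covariance matrix, verifies the factorization, and also confirms that the sample covariance of many realizations approaches C (the number of realizations, 20000, is arbitrary):
import numpy as np
N = 100
x = np.linspace(0, 1, N)
def c(h):
    return np.exp(-np.abs(h ** 2 * 250))
C = c(x[:, None] - x[None, :])
w, v = np.linalg.eig(C)
w = np.real(w).clip(min=0)
A = np.real(v) @ np.diag(w ** 0.5)
# the factor reproduces the covariance matrix ...
print(np.allclose(C, A @ A.T))              # True
# ... and many realizations reproduce it empirically
n = 20000
Z = A @ np.random.randn(N, n)               # one realization per column
C_emp = (Z @ Z.T) / n                       # sample covariance (zero mean)
print(np.abs(C_emp - C).max())              # small, shrinks roughly as 1/sqrt(n)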
The last step is to make the realizations honour the measured data. For the multivariate normal distribution this is a textbook operation: conditioned on the data $z$ at the points $u$, the values on the mesh are again normal, their mean is exactly the kriging estimate, and their covariance is the posterior matrix $C_{\mathrm{post}} = C_{yy} - C_{yz} C_{zz}^{-1} C_{zy}$. Sampling from this conditional distribution in the same way as before gives conditional realizations; five of them are shown in fig. 5.
import numpy as np
import matplotlib.pyplot as pl
np.random.seed(3)
# source data
M = 5
# coordinates of source data
u = np.random.rand(M)
# source data
z = np.random.randn(M)
# Modeling mesh
N = 100
x = np.linspace(0, 1, N)
# covariance function
def c(h):
    return np.exp(-np.abs(h ** 2 * 250))
# covariance matrix mesh-mesh
Cyy = np.zeros((N, N))
for i in range(N):
    Cyy[i, :] = c(x - x[i])
# covariance matrix mesh-data
Cyz = np.zeros((N, M))
# covariance matrix data-data
Czz = np.zeros((M, M))
for j in range(M):
    Cyz[:, j] = c(x - u[j])
    Czz[:, j] = c(u - u[j])
# posterior covariance
Cpost = Cyy - Cyz @ np.linalg.inv(Czz) @ Cyz.T
# let's find the posterior mean, i.e. kriging interpolation
lamda = np.linalg.solve(Czz, z)
y = np.zeros_like(x)
# interpolation
for i in range(M):
    y += lamda[i] * c(u[i] - x)
# eigen decomposition
w, v = np.linalg.eig(Cpost)
# guard against round-off: keep real parts, clip tiny negative eigenvalues to zero
w = np.real(w).clip(min=0)
A = np.real(v) @ np.diag(w ** 0.5)
# you can check that Cpost is (approximately) equal to A @ A.T
# draw graph
pl.figure()
for k in range(5):
    # independent normal values
    zeta = np.random.randn(N)
    # dependent multinormal values
    Z = A @ zeta
    pl.plot(x, Z + y, color=[(5 - k) / 5] * 3)
# replot the last realization with a label so the legend has a single entry
pl.plot(x, Z + y, color=[(5 - k) / 5] * 3, label='Stochastic realizations')
pl.plot(x, y, '.-', color='blue', alpha=0.4, label='Expectation(Kriging)')
pl.scatter(u, z, color='red', label='Source data')
pl.legend()
pl.show()
pl.close()
Figure 5. Stochastic realizations, their expectation (the kriging curve) and the source data.
Look closely at fig. 5. Every realization passes exactly through the measured points, because there the posterior variance is zero; away from the wells the realizations fan out, and the width of that fan is an honest picture of how little we actually know there. Each realization wiggles about as much as real data do, unlike the polished kriging curve. And the kriging curve itself is nothing mysterious: it is simply the average of an infinite number of such realizations. So when reserves have to be counted, one can compute them on every realization and obtain not a single number but a whole distribution, with its mean, its spread, its pessimistic and its optimistic cases.
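The claim that kriging is the average of the realizations is easy to test; the self-contained sketch below rebuilds the conditional model from the previous script in vectorized form and compares a few thousand realizations (the count is arbitrary) with the kriging curve:
import numpy as np
np.random.seed(3)
M, N = 5, 100
u = np.random.rand(M)                      # data locations
z = np.random.randn(M)                     # data values
x = np.linspace(0, 1, N)                   # modelling mesh
def c(h):
    return np.exp(-np.abs(h ** 2 * 250))
Czz = c(u[:, None] - u[None, :])           # data-data covariance
Cyz = c(x[:, None] - u[None, :])           # mesh-data covariance
Cyy = c(x[:, None] - x[None, :])           # mesh-mesh covariance
y = Cyz @ np.linalg.solve(Czz, z)          # kriging mean
Cpost = Cyy - Cyz @ np.linalg.inv(Czz) @ Cyz.T
w, v = np.linalg.eig(Cpost)
w = np.real(w).clip(min=0)
A = np.real(v) @ np.diag(w ** 0.5)
# a few thousand conditional realizations, one per column
n = 5000
R = y[:, None] + A @ np.random.randn(N, n)
# their average approaches the kriging curve as n grows
print(np.abs(R.mean(axis=1) - y).max())    # of order 1/sqrt(n)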
What next?
Everything above was one-dimensional only so that the pictures stayed simple. Nothing conceptually changes in two or three dimensions: the covariance function takes a 2D or 3D separation, the mesh becomes a grid, and all the linear algebra is still a handful of numpy calls (a minimal 2D sketch is given right after fig. 6). Figure 6 shows an example of such a two-dimensional result.
Figure 6.
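Here is a minimal sketch of the two-dimensional unconditional case, assuming the same Gaussian-shaped covariance as before (the grid size and the covariance parameter are arbitrary):
import numpy as np
import matplotlib.pyplot as pl
np.random.seed(0)
# 2D modelling mesh: n * n nodes on the unit square
n = 40
x = np.linspace(0, 1, n)
X, Y = np.meshgrid(x, x)
pts = np.column_stack([X.ravel(), Y.ravel()])
# covariance as a function of 2D distance
def c(h):
    return np.exp(-(h ** 2) * 100)
# covariance matrix between all pairs of nodes
d = np.linalg.norm(pts[:, None, :] - pts[None, :, :], axis=-1)
C = c(d)
# factor C = A @ A.T and draw one unconditional realization
w, v = np.linalg.eigh(C)                   # eigh, since C is symmetric
A = v @ np.diag(w.clip(min=0) ** 0.5)
Z = A @ np.random.randn(n * n)
# draw the map
pl.figure()
pl.imshow(Z.reshape(n, n), origin='lower', extent=(0, 1, 0, 1))
pl.colorbar()
pl.show()
pl.close()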
To compare the approaches visually, two more maps built from the same kind of well data are shown in fig. 7 and fig. 8.
Figure 7.
Figure 8.
Put next to each other, figures 7 and 8 make the contrast between the smooth, «averaged» picture and the «rough», realistically variable one plain to see.
A real model never deals with just one property. Suppose that alongside property «X» we also work with a related property «Y» (fig. 9).
Figure 9. Property "X".
After the two properties are carried through the modelling, the relationship between them should survive; fig. 10 shows what actually happens.
Figure 10. Property "X".
Figure 10 makes the point of the whole comparison: a smooth, averaged model and a stochastic one may look similar at first glance, yet they behave very differently once you start computing things from them, relationships between properties, volumes, reserves. The stochastic realizations keep the variability and the relationships seen in the data; the averaged picture quietly loses them.
That, in essence, is the entire mathematical core of geological modelling: a covariance function, a system of linear equations and a random-number generator. Everything else, data handling, 3D grids, visualization, performance, is engineering. If the topic caught your interest, reproduce the examples, push them into two and three dimensions, feed them your own data, and perhaps that is exactly how the next piece of geological software, and the next «digital geologist», will appear.