print("Hello world!") 2 + 2 typeof(42.0) function mandel(z) c = z maxiter = 80 for n in 1:maxiter if abs(z) > 2 return n-1 end z = z^2 + c end return maxiter end @time mandel(1.2) # time call on a Float64 @time mandel(3.4) # time on another Float64 @time mandel(1.0 + 5.0im) @time mandel(2.0 + 0.5im) using PyPlot plt.imshow([mandel(x + y * im) for y = -1:0.001:1, x = -2:0.001:1]) typeof(42) supertype(Int64) supertype(Signed) subtypes(Integer) Bool <: Integer # is Bool a subtype of Integer? Bool <: String divide(x, y) = x / y divide(1, 2) divide([1 2; 3 4], [1 2; 3 7]) divide(x::Integer, y::Integer) = floor(x/y) divide(x::String, y::String) = join([x, y], " / ") divide(1, 2) divide("Hello", "World!") divide(1.0, 2.0) abstract type Organism end struct Animal <: Organism name::String is_hervibore::Bool end struct Plant <: Organism name::String is_flowering::Bool end describe(o::Organism) = string(o.name) # fall-back method function describe(p::Plant) if p.is_flowering text = " is a flowering plant." else text = " is a non-flowering plant." end return p.name*text end describe(Animal("Elephant", true)) describe(Plant("Fern", false)) function function_x(x::String) println("this is a string: $x") end function function_x(x::Int) println("$(x^2) is the square of $x") end # each call to the function_x() will dispatch the corresponding method depending on the parameter's type function_x("a string") function_x(2) using ForwardDiff function sqrt_babylonian(s) x = s / 2 while abs(x^2 - s) > 0.001 x = (x + s/x) / 2 end x end sqrt_babylonian(2) - sqrt(2) @show ForwardDiff.derivative(sqrt_babylonian, 2); @show ForwardDiff.derivative(sqrt, 2); using Unitful using Unitful: J, kg, m, s 3J + 1kg * (1m / 1s)^2 using MLJ models() import Statistics using PrettyPrinting using StableRNGs X, y = @load_iris; @load DecisionTreeClassifier tree_model = DecisionTreeClassifier() tree = machine(tree_model, X, y) rng = StableRNG(566) train, test = partition(eachindex(y), 0.7, shuffle=true, rng=rng) test[1:3] fit!(tree, rows=train) fitted_params(tree) |> pprint ŷ = predict(tree, rows=test) @show ŷ[1] ȳ = predict_mode(tree, rows=test) @show ȳ[1] @show mode(ŷ[1]) mce = cross_entropy(ŷ, y[test]) |> mean round(mce, digits=4) using MLJ using StableRNGs import DataFrames @load RidgeRegressor pkg=MultivariateStats rng = StableRNG(6616) # for reproducibility x1 = rand(rng, 300) x2 = rand(rng, 300) x3 = rand(rng, 300) y = exp.(x1 - x2 -2x3 + 0.1*rand(rng, 300)) X = DataFrames.DataFrame(x1=x1, x2=x2, x3=x3) test, train = partition(eachindex(y), 0.8); Xs = source(X) ys = source(y, kind=:target) std_model = Standardizer() stand = machine(std_model, Xs) W = MLJ.transform(stand, Xs) box_model = UnivariateBoxCoxTransformer() box = machine(box_model, ys) z = MLJ.transform(box, ys) ridge_model = RidgeRegressor(lambda=0.1) ridge = machine(ridge_model, W, z) ẑ = predict(ridge, W) ŷ = inverse_transform(box, ẑ) @from_network CompositeModel(std=std_model, box=box_model, ridge=ridge_model) <= ŷ; cm = machine(CompositeModel(), X, y) res = evaluate!(cm, resampling=Holdout(fraction_train=0.8, rng=51), measure=rms) round(res.measurement[1], sigdigits=3)