
Commit

Add files via upload
Files with the code to study the dynamical systems used in the paper
PythagoreanCult authored Dec 9, 2024
1 parent b037690 commit 1946999
Showing 8 changed files with 1,019 additions and 0 deletions.
70 changes: 70 additions & 0 deletions scripts/Cantor shift case study.jl
@@ -0,0 +1,70 @@
"""
This file computes and plots quantities related to the regular variation property for the one-sided
full shift on the Cantor set. The first section of the code iterates the Cantor shift map and
computes the quantities described in the src/regularly_varying.jl file for a point in the Cantor
set. The remaining sections save, read and plot the data.
"""


using DrWatson
@quickactivate "FractalDimensionsQ"

using ChaosTools, OrdinaryDiffEq
using Distances
using DynamicalSystemsBase
using ProgressMeter
using Interpolations
using DataFrames
using CSV
using FractalDimensions
using CairoMakie
using Tables

# Helper functions used below (regularlyvarying, plot_regular_variation_one_line)
# are assumed to be defined in the project's src folder.
include(srcdir("regularly_varying.jl"))
include(srcdir("theme.jl"))

################### Iterating the process ################

sequence_length = 40    # number of ternary digits kept in each symbol sequence
function Shift_map!(un, u, p, n)
    # Shift the symbol sequence one step: drop the first symbol, append a new
    # random symbol from {0, 2}, and write the result into the next state `un`.
    deleteat!(u, 1)
    push!(u, rand((0, 2)))
    un .= u
    return nothing
end

function parametrizationR(state)
    # Ternary expansion with digits in {0, 2}: maps the symbol sequence to a
    # point of the middle-thirds Cantor set.
    x = sum(state .* [1/3^i for i in 1:sequence_length])
    return SVector(x)
end

function fakeinverse(state)
    # Placeholder for the inverse projection: returns a fresh random admissible
    # symbol sequence instead of the true preimage.
    return vec([rand((0, 2)) for i in 1:sequence_length])
end

u0 = [rand((0, 2)) for i in 1:sequence_length]
p0 = [0.1]
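# The shift acts on symbol sequences; the ProjectedDynamicalSystem below observes
# it through parametrizationR, i.e. as a point of the Cantor set in [0, 1], with
# fakeinverse standing in for the inverse of the projection.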
cantor_shift = DeterministicIteratedMap(Shift_map!, u0, p0)
cshift = ProjectedDynamicalSystem(cantor_shift,parametrizationR,fakeinverse)

point = [0.10804982490226529]
maxtime = 500_000_000
nevents = 5000


updatetimes, radii, volumeratio, Δloc, points, corr_dim = regularlyvarying(cshift,point,nevents,maxtime)

maxupdates = length(updatetimes)
endimension = round(Δloc[end], digits = 3)
endCorr = round(corr_dim[end], digits = 3)

############ Saving the data

result = DataFrame(Tables.table(hcat(updatetimes, radii, volumeratio, Δloc, vcat(points,zeros(maxupdates - nevents,1)),corr_dim)))
rename!(result,:Column1 => :Update_times, :Column2 => :Radii, :Column3 => :volumeratio, :Column4 => :Δloc, :Column5 => :Pointsx, :Column6 => :Corr_dim)
CSV.write(savename("Cantor Shift data",nothing,"csv"),result)

####### Reading the data #########

cantordata = CSV.read("Cantor Shift data.csv",DataFrame)

######### Plotting #############

plot_regular_variation_one_line(cantordata[:,2],cantordata[:,3],cantordata[:,4], cantordata[:,6], (0.4,1), "Cantor Shift")
wsave("Cantor Shift.png",current_figure())
166 changes: 166 additions & 0 deletions scripts/Extremal index check.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
"""
This script tests the intervals estimator for the extremal index (Maria Süveges, 2007) in a continuous
time setting. Since the estimator is intended for discrete systems, there is an ambiguity in how to
correctly discretise the process before applying the algorithm. The code simulates an observable computed
along an orbit of the Lorenz 63 system, computes its extremal index for different values of the time step
and of the trajectory length, and saves the data. The last section reads the data and estimates, through
the extremal index, the continuous time that the process spends on average in a cluster.
"""

using CairoMakie
using DynamicalSystems
using StaticArrays
using Statistics
using ChaosTools
using Distances
using ProgressMeter
using DataFrames, CSV, Tables, DrWatson

################### Same length, different time step, changing quantile ################

lorenzds = Systems.lorenz(rand(3);ρ=32.0)

total_time = 1000
samplings = 50
θ1 = Array{Any}(undef,samplings)
for (i,j) in enumerate(range(1,5,samplings))
    timestep = 0.1 * 2^(-j)
    X, time = trajectory(lorenzds, total_time, Δt = timestep)
    # "Changing quantile": the threshold quantile grows with the sample size, so
    # the expected number of exceedances scales like √(length(X)).
    p = 1 - 1/√(length(X))
    ex = Exceedances(p, :mm)
    d, θ1[i] = extremevaltheory_dims_persistences(X, ex)
    println(i/samplings*100, "%")
end


result = DataFrame(Tables.table(θ1))
rename!(result, :Column1 => :θ1)
CSV.write(savename("ExtremalIndexVSTimestepChangingQuantile",nothing,"csv"),result,bufsize = 9000000)


################### Same time step, different length, changing quantile ################


samplings = 50
timestep = 0.1 * 2^(-range(1,5,10)[4])
θ2 = Array{Any}(undef,samplings)
for (i,j) in enumerate(range(1,5,samplings))
    total_time = 100 * 2^(j)
    X, time = trajectory(lorenzds, total_time, Δt = timestep)
    p = 1 - 1/√(length(X))
    ex = Exceedances(p, :mm)
    d, θ2[i] = extremevaltheory_dims_persistences(X, ex)
    println(i/samplings*100, "%")
end



result = DataFrame(Tables.table(θ2))
rename!(result, :Column1 => :θ2)
CSV.write(savename("ExtremalIndexVSTrajectoryLengthChangingQuantile",nothing,"csv"),result,bufsize = 9000000)

################### Same length, different time step, fixed quantile ################


total_time = 1000
samplings = 50
θ3 = Array{Any}(undef,samplings)
p = 0.99
for (i,j) in enumerate(range(1,5,samplings))
    timestep = 0.1 * 2^(-j)
    X, time = trajectory(lorenzds, total_time, Δt = timestep)
    ex = Exceedances(p, :mm)
    d, θ3[i] = extremevaltheory_dims_persistences(X, ex)
    println(i/samplings*100, "%")
end


result = DataFrame(Tables.table(θ3))
rename!(result, :Column1 => :θ3)
CSV.write(savename("ExtremalIndexVSTimestepFixedQuantile",nothing,"csv"),result,bufsize = 9000000)



################### Same time step, different length, fixed quantile ################


samplings = 50
timestep = 0.1 * 2^(-range(1,5,10)[4])
θ4 = Array{Any}(undef,samplings)
p = 0.99
for (i,j) in enumerate(range(1,5,samplings))
    total_time = 100 * 2^(j)
    X, time = trajectory(lorenzds, total_time, Δt = timestep)
    ex = Exceedances(p, :mm)
    d, θ4[i] = extremevaltheory_dims_persistences(X, ex)
    println(i/samplings*100, "%")
end


result = DataFrame(Tables.table(θ4))
rename!(result, :Column1 => :θ4)
CSV.write(savename("ExtremalIndexVSTrajectoryLengthFixedQuantile",nothing,"csv"),result,bufsize = 9000000)



############## Plotting #####################


fig = Figure(resolution = (600,1000))
lines(fig[1,1],0.1*2 .^ (-range(1,5,samplings)),[mean(θ1[i]) for i in 1:samplings],axis = (;xlabel = "Δt",ylabel = "Average θ", title= "θ vs. time step",
xticklabelsize = 25, yticklabelsize = 25, xlabelsize = 25, ylabelsize = 25,titlesize = 25, yticks = [0,0.25,0.50,0.75,1],limits = (nothing,(0,1))), label = "Changing quantile")

lines!(0.1*2 .^ (-range(1,5,samplings)),[mean(θ3[i]) for i in 1:samplings], label = "Fixed quantile")
axislegend(position = :rb,labelsize = 25)

lines(fig[2,1],100*2 .^ (range(1,5,samplings)),[mean(θ2[i]) for i in 1:samplings],axis = (; xlabel = "tₗ",ylabel = "Average θ", title= "θ vs. trajectory length",
xticklabelsize = 25, yticklabelsize = 25, xlabelsize = 25, ylabelsize = 25,titlesize = 25, yticks = [0,0.25,0.50,0.75,1],limits = (nothing,(0,1))), label = "Changing quantile")

lines!(100*2 .^ (range(1,5,samplings)),[mean(θ4[i]) for i in 1:samplings], label = "Fixed quantile")
axislegend(position = :rb,labelsize = 25)

current_figure()

save("ExtremalIndexSensitivity.png",current_figure())


############ Reading the data ###########


df1 = CSV.read("ExtremalIndexVSTimestepChangingQuantile.csv",DataFrame)
df2 = CSV.read("ExtremalIndexVSTrajectoryLengthChangingQuantile.csv",DataFrame)
df3 = CSV.read("ExtremalIndexVSTimestepFixedQuantile.csv",DataFrame)
df4 = CSV.read("ExtremalIndexVSTrajectoryLengthFixedQuantile.csv",DataFrame)

samplings = 50
θ1 = Array{Any}(undef, samplings)
θ2 = Array{Any}(undef, samplings)
θ3 = Array{Any}(undef, samplings)
θ4 = Array{Any}(undef, samplings)
for i in 1:samplings
    # Each CSV cell holds a vector printed as a string, e.g. "[0.3, 0.7, …]":
    # strip the brackets, split on commas, parse the numbers, keep only θ < 1.
    θ1[i] = filter(x -> x < 1, parse.(Float64, filter(x -> !(x == "") && !(x == " "), split(filter(!in(('[', ']')), df1[i,:][1]), ','))))
    θ2[i] = filter(x -> x < 1, parse.(Float64, filter(x -> !(x == "") && !(x == " "), split(filter(!in(('[', ']')), df2[i,:][1]), ','))))
    θ3[i] = filter(x -> x < 1, parse.(Float64, filter(x -> !(x == "") && !(x == " "), split(filter(!in(('[', ']')), df3[i,:][1]), ','))))
    θ4[i] = filter(x -> x < 1, parse.(Float64, filter(x -> !(x == "") && !(x == " "), split(filter(!in(('[', ']')), df4[i,:][1]), ','))))
end


######## Plotting the continuous time spent in cluster #############
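# The mean cluster size of the discrete exceedance series is approximately 1/θ,
# so multiplying by the sampling time step gives an estimate of the average
# continuous time spent in a cluster, which is what is plotted below.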

fig = Figure(resolution = (600,1000))
lines(fig[1,1], 0.1*2 .^ (-range(1,5,samplings)), [mean(1 ./θ1[i])*( 0.1 * 2^(-range(1,5,samplings)[i])) for i in 1:samplings],axis = (; xlabel = "Δt",ylabel = "Average t in cluster", title= "Time in cluster vs. time step",
xticklabelsize = 25, yticklabelsize = 25, xlabelsize = 25, ylabelsize = 25,titlesize = 25 #=,yticks = [0,0.25,0.50,0.75,1]=#,limits = (nothing,nothing)), label = "Changing quantile")

lines!(0.1*2 .^ (-range(1,5,samplings)),[mean(1 ./θ3[i])*( 0.1 * 2^(-range(1,5,samplings)[i])) for i in 1:samplings], label = "Fixed quantile")
axislegend(position = :rb,labelsize = 25)

lines(fig[2,1],100*2 .^ (range(1,5,samplings)),[mean(1 ./θ2[i])*( 0.1 * 2^(-range(1,5,10)[4])) for i in 1:samplings],axis = (; xlabel = "tₗ",ylabel = "Average t in cluster", title= "Time in cluster vs. trajectory length",
xticklabelsize = 25, yticklabelsize = 25, xlabelsize = 25, ylabelsize = 25,titlesize = 25#=, yticks = [0,0.25,0.50,0.75,1]=#,limits = (nothing,nothing)), label = "Changing quantile")

lines!(100*2 .^ (range(1,5,samplings)),[mean(1 ./θ4[i])*( 0.1 * 2^(-range(1,5,10)[4])) for i in 1:samplings], label = "Fixed quantile")
axislegend(position = :rc,labelsize = 25)

current_figure()


save("ExtremalIndexSensitivityinTime.png",current_figure())
114 changes: 114 additions & 0 deletions scripts/Fat fractal.jl
@@ -0,0 +1,114 @@
"""
This file computes and plots quantities related to the regular variation property for a randomly
ordered sequence of points lying in a fat Cantor set. The set is defined by iterating a map with
a gap; the points that remain after many iterations approximate an invariant set. The first section
of the code constructs the invariant set to be studied and turns it into a data series. The second
iterates the data series to compute the quantities described in the src/regularly_varying.jl file
for a point in the fat Cantor set. The remaining sections save, read and plot the data.
"""

using DrWatson
@quickactivate "FractalDimensionsQ"

using CairoMakie
using ChaosTools, OrdinaryDiffEq
using Distances
using DynamicalSystemsBase
using ProgressMeter
using Interpolations
using Random
using FractalDimensions
using Distributions
using DataFrames, CSV, Tables

include(srcdir("regularly_varying.jl"))
include(srcdir("theme.jl"))

########## Generating the set ################


function fat_fractal(u,p,n)
    r = 2*(1+1/2^(n+1))
    x = u[1]
    if x < 1/2
        dx = x*r
    else
        dx = (-x+1)*r
    end
    SVector(dx)
end
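# At step n the tent-like map above has slope r = 2(1 + 2^-(n+1)), so points in
# the middle gap (1/r, 1 - 1/r), of length roughly 2^-(n+1), escape [0, 1]. Since
# the removed fractions shrink geometrically, the surviving set keeps positive
# Lebesgue measure: a "fat" Cantor set.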

function inverseff1(u,p,n)
    r = 2*(1+1/2^(n+1))
    x = u[1]
    dx = x/r
    SVector(dx)
end

function inverseff2(u,p,n)
    r = 2*(1+1/2^(n+1))
    x = u[1]
    dx = 1-x/r
    SVector(dx)
end

p = [sqrt(2)]



statespace = rand(5)

maxiterations = 25

for i in 1:maxiterations
    global statespace   # update the top-level variable when run as a script
    statespace = fat_fractal.(statespace, p, i)
    statespace = filter(x -> 1 > x[1] > 0, statespace)
end


totlength = length(statespace)

statespace2 = []
for j in 1:maxiterations
    global statespace, statespace2, totlength
    # Pull every surviving point back through the two inverse branches of each
    # earlier map, refining the approximation of the invariant set.
    for i in 1:totlength
        push!(statespace2, inverseff1(statespace[i], p, maxiterations-j+1))
        push!(statespace2, inverseff2(statespace[i], p, maxiterations-j+1))
    end
    statespace = statespace2
    statespace2 = []
    totlength = length(statespace)
end

statespace = [i[1] for i in statespace]

totlength = length(statespace)
shuffledssset = shuffle(statespace)

############### Testing regular variation ########

function vectorrun(u,p,n)
    # Replay the shuffled data series as a dynamical system: the "parameters" p
    # hold the data and the time index n selects the next state.
    dx = p[n]
    SVector(dx)
end

u0 = [0.1]
fat_fractalds = DeterministicIteratedMap(vectorrun, u0, shuffledssset, t0 = 1)

point = [0.1262477477965864]
maxtime = totlength
nevents = 5000
updatetimes, radii, volumeratio, Δloc, points, corr_dim = regularlyvarying(fat_fractalds,point,nevents,maxtime)

maxupdates = length(updatetimes)


############### Plotting ############


plot_regular_variation_one_line(radii, volumeratio, Δloc, corr_dim, (0,1.5), "Fat Cantor set")


save("Fat_fractal.png",current_figure())


result = DataFrame(Tables.table(hcat(updatetimes, radii, volumeratio, Δloc, vcat(points,zeros(maxupdates - nevents,1)),corr_dim)))
rename!(result,:Column1 => :Update_times, :Column2 => :Radii, :Column3 => :volumeratio, :Column4 => :Δloc, :Column5 => :Pointsx, :Column6 => :Corr_dim)
CSV.write(savename("Fat Cantor Set data",nothing,"csv"),result)
