id
stringlengths 22
42
| metadata
dict | text
stringlengths 9
1.03M
|
---|---|---|
proofpile-julia0005-42849 | {
"provenance": "014.jsonl.gz:242850"
} | function FDivRhoGrad2Vec!(F,cCG,RhoCG,CG,Param)
# In-place horizontal hyper-diffusion term: accumulates
#   F .-= HyperDDiv * div(Rho * grad(c)) ./ JC
# on the collocated CG grid (OP x OP nodes per face, NF faces, nz layers).
#
# NOTE(review): the scratch arrays below are deliberately aliased in pairs —
# D1cCG/D1gradCG share CacheC1, D2cCG/D2gradCG share CacheC2, grad1CG/vC1
# share CacheC3, grad2CG/vC2 share CacheC4 — so the statement order is
# load-bearing; do not reorder.
nz=Param.Grid.nz;
OP=CG.OrdPoly+1;
NF=Param.Grid.NumFaces;
# Scratch buffers (aliased in pairs, see note above).
D1cCG = Param.CacheC1
D2cCG = Param.CacheC2
grad1CG = Param.CacheC3
grad2CG = Param.CacheC4
D1gradCG = Param.CacheC1
D2gradCG = Param.CacheC2
vC1 = Param.CacheC3
vC2 = Param.CacheC4
JC = Param.cache.JC
# Derivative of c along the first local direction: DS applied per face/layer.
mul!(reshape(D1cCG,OP,OP*NF*nz),CG.DS,reshape(cCG,OP,OP*nz*NF))
# Derivative along the second direction, via permuted (node-transposed) views.
mul!(reshape(PermutedDimsArray(D2cCG,(2,1,3,4)),OP,OP*NF*nz),CG.DS,reshape(PermutedDimsArray(cCG,(2,1,3,4)),OP,OP*nz*NF))
# Components of Rho * grad(c) using the metric terms dXdxIC11..dXdxIC22.
grad1CG .= RhoCG .* (Param.dXdxIC11.*D1cCG .+ Param.dXdxIC21.*D2cCG)
grad2CG .= RhoCG .* (Param.dXdxIC12.*D1cCG .+ Param.dXdxIC22.*D2cCG)
# Map the gradient back; this overwrites the no-longer-needed D1cCG/D2cCG storage.
D1gradCG .= Param.dXdxIC11.*grad1CG .+ Param.dXdxIC12.*grad2CG
D2gradCG .= Param.dXdxIC21.*grad1CG .+ Param.dXdxIC22.*grad2CG
# Divergence via the DW operator (presumably derivative-with-quadrature-weights
# — TODO confirm against CG construction), again per direction.
mul!(reshape(vC1,OP,OP*NF*nz),CG.DW,reshape(D1gradCG,OP,OP*nz*NF))
mul!(reshape(PermutedDimsArray(vC2,(2,1,3,4)),OP,OP*NF*nz),CG.DW,reshape(PermutedDimsArray(D2gradCG,(2,1,3,4)),OP,OP*nz*NF))
# Scale by the hyper-diffusion coefficient and divide by the Jacobian JC.
F .= F .- Param.HyperDDiv .* (vC1 .+ vC2) ./ JC
end
# Allocating variant of FDivRhoGrad2Vec!: returns div(Rho * grad(c)) ./ JC
# as a fresh OP x OP x NF x nz array instead of accumulating into F.
# NOTE(review): `permute` is not a Base function — presumably a project helper
# equivalent to permutedims with a Matlab-style dims vector; confirm.
function FDivRhoGrad2Vec(cCG,RhoCG,CG,Param)
nz=Param.Grid.nz;
OP=CG.OrdPoly+1;
NF=Param.Grid.NumFaces;
dXdxIC = Param.cache.dXdxIC
JC = Param.cache.JC
# Derivative of c along the first local direction.
D1cCG=reshape(
CG.DS*reshape(cCG,OP,OP*NF*nz)
,OP,OP,NF,nz);
# Derivative along the second direction (permute so DS acts on that index).
D2cCG=permute(reshape(
CG.DS*reshape(
permute(
reshape(cCG,OP,OP,NF,nz)
,[2 1 3 4])
,OP,OP*NF*nz)
,OP,OP,NF,nz)
,[2 1 3 4]);
# Components of Rho * grad(c) using the metric terms dXdxIC[..,i,j].
gradCG1=RhoCG .*
(dXdxIC[:,:,:,:,1,1].*D1cCG +
dXdxIC[:,:,:,:,2,1].*D2cCG);
gradCG2=RhoCG .*
(dXdxIC[:,:,:,:,1,2].*D1cCG +
dXdxIC[:,:,:,:,2,2].*D2cCG);
# Divergence: DW along direction 1 plus (via permutes) DW along direction 2,
# finally divided pointwise by the Jacobian JC.
divCG=(reshape(
CG.DW*(reshape(gradCG1,OP,OP*NF*nz) .*
reshape(dXdxIC[:,:,:,:,1,1],OP,OP*NF*nz) +
reshape(gradCG2,OP,OP*NF*nz) .*
reshape(dXdxIC[:,:,:,:,1,2],OP,OP*NF*nz))
,OP,OP,NF,nz) +
permute(
reshape(
CG.DW*reshape(
permute(
(reshape(gradCG1,OP,OP,NF,nz) .*
dXdxIC[:,:,:,:,2,1] +
reshape(gradCG2,OP,OP,NF,nz) .*
dXdxIC[:,:,:,:,2,2])
,[2 1 3 4])
,OP,OP*NF*nz)
,OP,OP,NF,nz)
,[2 1 3 4]))./JC;
return divCG
end
|
proofpile-julia0005-42850 | {
"provenance": "014.jsonl.gz:242851"
} | function charttype_to_dict(chart::GoogleChart)
# Build the templating context for a single chart.
# NOTE(review): `[k => v, ...]` is the pre-0.4 dict-literal syntax (as are the
# `f{T}(...)` signatures and `Union(...)` below) — this file targets legacy Julia.
[
:chart_type => chart.chart_type,
:chart_id => chart.id,
:width=>chart.width,
:height=>chart.height,
:chart_data => chart.data,
:chart_options => JSON.json(chart.options)
]
end
## take vector of google charts
# Build the page-level templating context for a vector of charts: the union of
# all required JS packages, one per-chart context each, and the concatenated
# extra snippets.
function charts_to_dict(charts)
# Splat the per-chart package lists into a single de-duplicated union.
packages = union([chart.packages for chart in charts]...)
[:chart_packages => JSON.json(packages),
:charts => [charttype_to_dict(chart) for chart in charts],
:chart_xtra => join([chart.xtra for chart in charts],"\n")
]
end
## Render charts
## io -- render to io stream
## fname -- render to file
## none -- create html file, show in browser
# Render `charts` through Mustache into the stream `io`.
# `tpl === nothing` selects the default full-page template `chart_tpl`
# (defined elsewhere in the package — confirm).
function render{T <: GoogleChart}(io::IO,
charts::Vector{T}, # chart objects
tpl::Union(Nothing, Mustache.MustacheTokens) # Mustache template. Default is entire page
)
details = charts_to_dict(charts)
## defaults
_tpl = isa(tpl, Nothing) ? chart_tpl : tpl
Mustache.render(io, _tpl, details)
end
# Render `charts` to the file `fname`.
function render{T <: GoogleChart}(fname::String,
charts::Vector{T},
tpl::Union(Nothing, Mustache.MustacheTokens))
io = open(fname, "w")
# NOTE(review): if render throws, `io` is never closed — consider try/finally.
render(io, charts, tpl)
close(io)
end
# No io/filename given: render to a temp .html file and open it in the browser.
function render{T <: GoogleChart}(charts::Vector{T}, tpl::Union(Nothing, Mustache.MustacheTokens))
fname = tempname() * ".html"
render(fname, charts, tpl)
open_url(fname)
end
## no tpl
# Convenience methods: default the template to `nothing` (package default).
render{T <: GoogleChart}(io::IO, charts::Vector{T}) = render(io, charts, nothing)
render{T <: GoogleChart}(fname::String, charts::Vector{T}) = render(fname, charts, nothing)
## no io or file name specified, render to browser
render{T <: GoogleChart}(charts::Vector{T}) = render(charts, nothing)
render{T <: GoogleChart}(io::Nothing, charts::Vector{T}, tpl::Union(Nothing, Mustache.MustacheTokens)) = render(charts, tpl)
# Single-chart methods: wrap the chart in a one-element vector and delegate.
render(io::IO, chart::GoogleChart, tpl::Union(Nothing, Mustache.MustacheTokens)) = render(io, [chart], tpl)
render(io::IO, chart::GoogleChart) = render(io, chart, nothing)
render(fname::String, chart::GoogleChart, tpl::Union(Nothing, Mustache.MustacheTokens)) = render(fname, [chart], tpl)
render(fname::String, chart::GoogleChart) = render(fname, [chart], nothing)
render(chart::GoogleChart, tpl::Union(Nothing, Mustache.MustacheTokens)) = render([chart], tpl)
render(chart::GoogleChart) = render([chart])
# Explicit `io::Nothing` also falls through to browser rendering.
render(io::Nothing, chart::GoogleChart, tpl::Union(Nothing, Mustache.MustacheTokens)) = render([chart], tpl)
render(io::Nothing, chart::GoogleChart) = render([chart], nothing)
## for using within Gadfly.weave:
# Mustache fragment: a target <div> plus a script that builds and draws the
# google.visualization chart.
# NOTE(review): unlike writemime_tpl, this template does not call google.load —
# it presumably relies on the host page having loaded the API already; confirm.
gadfly_weave_tpl = """
<div id={{:id}} style="width:{{:width}}px; height:{{:height}}px;"></div>
<script>
var {{:id}}_data = {{{:chart_data}}};
var {{:id}}_options = {{{:chart_options}}};
var {{:id}}_chart = new google.visualization.{{:chart_type}}(document.getElementById('{{:id}}'));{{:id}}_chart.draw({{:id}}_data, {{:id}}_options);
</script>
"""
## this is used by weave...
# Render a chart as a weave-embeddable HTML fragment.
# NOTE(review): width/height are hard-coded to 600x400 here — the chart's own
# width/height fields (used by charttype_to_dict) are ignored; confirm intended.
function gadfly_format(x::CoreChart)
d = [:id => x.id,
:width => 600,
:height => 400,
:chart_data => x.data,
:chart_options => json(x.options),
:chart_type => x.chart_type
]
Mustache.render(gadfly_weave_tpl, d)
end
## IJulia support
import Base.writemime
export writemime
## read https://developers.google.com/loader/#GoogleLoad to see if this can be tidied up
# Mustache fragment for IJulia display: a target <div> plus a script that
# loads the visualization API via google.load and draws the chart from a
# per-chart callback; the setTimeout defers loading until the cell's DOM exists
# — presumably a workaround, see the google loader link above (confirm).
writemime_tpl = """
<div id={{:id}} style="width:{{:width}}px; height:{{:height}}px;"></div>
<script>
function load_chart_{{:id}}() {
var {{:id}}_data = {{{:chart_data}}};
var {{:id}}_options = {{{:chart_options}}};
var {{:id}}_chart = new google.visualization.{{:chart_type}}(document.getElementById('{{:id}}'));{{:id}}_chart.draw({{:id}}_data, {{:id}}_options);
}
setTimeout(function(){
google.load('visualization', '1', {
'callback':load_chart_{{:id}},
'packages':['corechart']
}
)}, 10);
</script>
"""
# text/html writemime: emit the chart as a <div> plus loader script (IJulia).
# NOTE(review): width/height hard-coded to 600x400 — chart.width/height are
# ignored here, unlike in charttype_to_dict; confirm intended.
function writemime(io::IO, ::MIME"text/html", x::GoogleChart)
d = [:id => x.id,
:width => 600,
:height => 400,
:chart_data => x.data,
:chart_options => json(x.options),
:chart_type => x.chart_type
]
out = Mustache.render(writemime_tpl, d)
print(io, out)
end
## inject code into browser if displayable
# Inject the Google JSAPI loader into any HTML-capable display (e.g. IJulia).
inject_javascript() = display("text/html", """
<script type='text/javascript' src='https://www.google.com/jsapi'></script>
""")
## inject when package is loaded
if displayable("text/html")
inject_javascript()
end
## in case surface plot is desired (not reliable)
# Inject the SurfacePlot JS libraries.
# NOTE(review): plain-http URLs on googlecode.com — the host is long defunct
# and the scheme is insecure; confirm whether this still works at all.
inject_surfaceplot_javascript() = Base.display("text/html", """
<script type='text/javascript' src='http://javascript-surface-plot.googlecode.com/svn/trunk/javascript/SurfacePlot.js'></script>
<script type='text/javascript' src='http://javascript-surface-plot.googlecode.com/svn/trunk/javascript/ColourGradient.js'></script>
""")
## display to browser, or writemime
#function Base.repl_show(io::IO, chart::GoogleChart)
# text/plain fallback: at the REPL (io === STDOUT) pop the chart open in a
# browser; otherwise degrade to the text/html representation.
function writemime(io::IO, ::MIME"text/plain", chart::GoogleChart)
if io === STDOUT
render(nothing, chart)
else
writemime(io, "text/html", chart)
end
end
# Base.show(io::IO, chart::GoogleChart) = print(io, "<plot>")
|
proofpile-julia0005-42851 | {
"provenance": "014.jsonl.gz:242852"
} | export prune
"""
prune(m, xs...; trim_args = true)
Returns a model transformed by removing `xs...` and all variables that depend on `xs...`. If `trim_args = true`, unneeded arguments are also removed. Use `trim_args = false` to leave arguments unaffected.
# Examples
```jldoctest
m = @model n begin
α ~ Gamma()
β ~ Gamma()
θ ~ Beta(α,β)
x ~ Binomial(n, θ)
end;
prune(m, :θ)
# output
@model begin
β ~ Gamma()
α ~ Gamma()
end
```
```jldoctest
m = @model n begin
α ~ Gamma()
β ~ Gamma()
θ ~ Beta(α,β)
x ~ Binomial(n, θ)
end;
prune(m, :n)
# output
@model begin
β ~ Gamma()
α ~ Gamma()
θ ~ Beta(α, β)
end
```
"""
function prune(m::Model, xs :: Symbol...; trim_args = true)
po = poset(m) #Creates a new SimplePoset, so no need to copy before mutating
newvars = variables(m)
for x in xs
setdiff!(newvars, above(po,x))
setdiff!(newvars, [x])
end
# Keep arguments in newvars
newargs = arguments(m) ∩ newvars
setdiff!(newvars, newargs)
if trim_args
# keep arguments only if depended upon by newvars
dependencies = mapfoldl(var -> below(po, var), vcat, newvars, init = Symbol[]) # mapfoldl needed since newvars can be empty
newargs = dependencies ∩ newargs
end
theModule = getmodule(m)
m_init = Model(theModule, newargs, NamedTuple(), NamedTuple(), nothing)
m = foldl(newvars; init=m_init) do m0,v
merge(m0, Model(theModule, findStatement(m, v)))
end
end
|
proofpile-julia0005-42852 | {
"provenance": "014.jsonl.gz:242853"
} | module Core
# Result-management core: directory layout, logging, description building and
# (de)serialization helpers, each implemented in its own included file.
# NOTE(review): naming a module `Core` shadows Julia's built-in `Core` within
# importing scopes — confirm this is intentional.
import FileIO;
include("res_dir_tree.jl");
include("add_to_log.jl");
include("build_description.jl");
include("save_and_load_data.jl");
include("save_and_load_desc.jl");
include("load_log.jl");
end
proofpile-julia0005-42853 | {
"provenance": "014.jsonl.gz:242854"
} | abstract type AbstractStdShape{T} end
# (AbstractStdShape{T}: root of the standard-shape hierarchy; `T` is the
# numeric type used for lengths and coordinates.)
#####
# StdPoint
#####
# Dimensionless point shape: carries no data of its own; position is supplied
# externally (cf. the pos/dir methods on the other shapes below).
struct StdPoint{T} <: AbstractStdShape{T} end
#####
# StdLine
#####
# A "standard" line segment: centered at the origin, lying along the x-axis,
# spanning ±half_length.
struct StdLine{T} <: AbstractStdShape{T}
    half_length::T
end

# Accessor for the segment's half-length.
get_half_length(line::StdLine) = line.half_length

# Endpoints of the standard (origin-centered, x-aligned) segment.
function get_head(line::StdLine{T}) where {T}
    return SA.SVector(get_half_length(line), zero(T))
end
function get_tail(line::StdLine{T}) where {T}
    return SA.SVector(-get_half_length(line), zero(T))
end
# Vertices are returned (tail, head).
get_vertices(line::StdLine) = (get_tail(line), get_head(line))

# Endpoints of the segment translated to an arbitrary position `pos`.
get_head(line::StdLine{T}, pos::SA.SVector{2, T}) where {T} = get_head(line) + pos
get_tail(line::StdLine{T}, pos::SA.SVector{2, T}) where {T} = get_tail(line) + pos
get_vertices(line::StdLine{T}, pos::SA.SVector{2, T}) where {T} = (get_tail(line, pos), get_head(line, pos))

# Endpoints at position `pos` with axis direction `dir`.
function get_head(line::StdLine{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T}
    return pos + get_half_length(line) * dir
end
function get_tail(line::StdLine{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T}
    return pos - get_half_length(line) * dir
end
get_vertices(line::StdLine{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = (get_tail(line, pos, dir), get_head(line, pos, dir))
#####
# StdCircle
#####
# A "standard" circle: centered at the origin with the given radius.
struct StdCircle{T} <: AbstractStdShape{T}
    radius::T
end

# Accessor for the circle's radius.
get_radius(circle::StdCircle) = circle.radius

# Area = pi * r * r, converted back to the circle's numeric type `T`.
# (Multiplication order kept left-to-right to match the original rounding.)
function get_area(circle::StdCircle{T}) where {T}
    r = get_radius(circle)
    return convert(T, pi * r * r)
end
#####
# StdRect
#####
# A "standard" axis-aligned rectangle centered at the origin, spanning
# ±half_width x ±half_height.
struct StdRect{T} <: AbstractStdShape{T}
half_width::T
half_height::T
end
get_half_width(rect::StdRect) = rect.half_width
get_half_height(rect::StdRect) = rect.half_height
get_width(rect::StdRect) = 2 * get_half_width(rect)
get_height(rect::StdRect) = 2 * get_half_height(rect)
# bottom_left, bottom_right, top_left, top_right, and vertices for StdRect
get_bottom_left(rect::StdRect) = SA.SVector(-get_half_width(rect), -get_half_height(rect))
get_bottom_right(rect::StdRect) = SA.SVector(get_half_width(rect), -get_half_height(rect))
get_top_right(rect::StdRect) = SA.SVector(get_half_width(rect), get_half_height(rect))
get_top_left(rect::StdRect) = SA.SVector(-get_half_width(rect), get_half_height(rect))
# Vertices are returned in the order (bottom-left, bottom-right, top-right, top-left).
get_vertices(rect::StdRect{T}) where {T} = (get_bottom_left(rect), get_bottom_right(rect), get_top_right(rect), get_top_left(rect))
# bottom_left, bottom_right, top_left, top_right, and vertices for StdRect at arbitrary position
get_bottom_left(rect::StdRect{T}, pos::SA.SVector{2, T}) where {T} = pos + SA.SVector(-get_half_width(rect), -get_half_height(rect))
get_bottom_right(rect::StdRect{T}, pos::SA.SVector{2, T}) where {T} = pos + SA.SVector(get_half_width(rect), -get_half_height(rect))
get_top_right(rect::StdRect{T}, pos::SA.SVector{2, T}) where {T} = pos + SA.SVector(get_half_width(rect), get_half_height(rect))
get_top_left(rect::StdRect{T}, pos::SA.SVector{2, T}) where {T} = pos + SA.SVector(-get_half_width(rect), get_half_height(rect))
get_vertices(rect::StdRect{T}, pos::SA.SVector{2, T}) where {T} = (get_bottom_left(rect, pos), get_bottom_right(rect, pos), get_top_right(rect, pos), get_top_left(rect, pos))
# bottom_left, bottom_right, top_left, top_right, and vertices for StdRect at arbitrary position and orientation
# `dir` acts as the rect's local +x axis; rotate_plus_90(dir) (defined elsewhere,
# presumably a +90° rotation — confirm) supplies the local +y axis.
get_bottom_left(rect::StdRect{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = pos - get_half_width(rect) * dir - get_half_height(rect) * rotate_plus_90(dir)
get_bottom_right(rect::StdRect{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = pos + get_half_width(rect) * dir - get_half_height(rect) * rotate_plus_90(dir)
get_top_right(rect::StdRect{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = pos + get_half_width(rect) * dir + get_half_height(rect) * rotate_plus_90(dir)
get_top_left(rect::StdRect{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = pos - get_half_width(rect) * dir + get_half_height(rect) * rotate_plus_90(dir)
get_vertices(rect::StdRect{T}, pos::SA.SVector{2, T}, dir::SA.SVector{2, T}) where {T} = (get_bottom_left(rect, pos, dir), get_bottom_right(rect, pos, dir), get_top_right(rect, pos, dir), get_top_left(rect, pos, dir))
get_area(rect::StdRect{T}) where {T} = convert(T, 4 * get_half_width(rect) * get_half_height(rect))
# Edge normals, matching the vertex order above: (bottom, right, top, left)
# when `dir` is the local +x axis. Unit vectors iff `dir` is a unit vector.
get_normals(rect::StdRect{T}) where {T} = get_normals(rect, SA.SVector(one(T), zero(T)))
function get_normals(rect::StdRect{T}, dir::SA.SVector{2, T}) where {T}
x_cap = dir
y_cap = rotate_plus_90(dir)
return (-y_cap, x_cap, y_cap, -x_cap)
end
|
proofpile-julia0005-42854 | {
"provenance": "014.jsonl.gz:242855"
} | using RasterShadow
using Test
using JLD
@testset "basic input" begin
flat = ones(20,10)
flat_sh = shadowing(flat,45,45,1)
@test size(flat_sh) == size(flat)
@test flat_sh == ones(20,10)
end
# Regression fixtures: a DEM plus precomputed shadow masks stored in JLD.
testdata = JLD.load(joinpath(@__DIR__,"testdata","testdata.jld"))
dem = testdata["dem"]
@testset "shadowing" begin
@test testdata["sh_30_30"] == shadowing(dem,30,30,0.5)
@test testdata["sh_150_80"] == shadowing(dem,150,80,0.5)
# NOTE(review): the next assertion duplicates the previous line exactly —
# likely a copy-paste slip; confirm whether different azimuth/elevation
# parameters (and fixture key) were intended.
@test testdata["sh_150_80"] == shadowing(dem,150,80,0.5)
end
|
proofpile-julia0005-42855 | {
"provenance": "014.jsonl.gz:242856"
} | # -*- encoding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright © 2021 Matteo Foglieni and Riccardo Gervasoni
#
"""
Scene(
materials::Dict{String, Material} = Dict{String, Material}(),
world::World = World(),
camera::Union{Camera, Nothing} = nothing,
float_variables::Dict{String, Float64} = Dict{String, Float64}(),
string_variables::Dict{String, String} = Dict{String, String}(),
bool_variables::Dict{String, Bool} = Dict{String, Bool}(),
vector_variables::Dict{String,Vec} = Dict{String,Vec}(),
color_variables::Dict{String,RGB{Float32}} = Dict{String,RGB{Float32}}(),
pigment_variables::Dict{String,Pigment} = Dict{String,Pigment}(),
brdf_variables::Dict{String,BRDF} = Dict{String,BRDF}(),
transformation_variables::Dict{String,Transformation} = Dict{String,Transformation}(),
variable_names::Set{String} = Set{String}(),
overridden_variables::Set{String} = Set{String}(),
)
A scene read from a scene file.
See also: [`Material`](@ref), [`World`](@ref), [`Camera`](@ref), [`Vec`](@ref),
[`Pigment`](@ref), [`BRDF`](@ref), [`Transformation`](@ref)
"""
mutable struct Scene
# Named materials declared in the scene file.
materials::Dict{String, Material}
world::World
# `nothing` until a camera declaration is parsed.
camera::Union{Camera, Nothing}
# User-declared variables from the scene file, bucketed by type.
float_variables::Dict{String, Float64}
string_variables::Dict{String, String}
bool_variables::Dict{String,Bool}
vector_variables::Dict{String,Vec}
color_variables::Dict{String,RGB{Float32}}
pigment_variables::Dict{String,Pigment}
brdf_variables::Dict{String,BRDF}
transformation_variables::Dict{String,Transformation}
# Presumably all declared variable names across the buckets above — confirm.
variable_names::Set{String}
# Presumably names whose values were overridden externally — confirm.
overridden_variables::Set{String}
# Inner constructor: every field is optional, defaulting to an empty
# container (and `nothing` for the camera).
Scene(
materials::Dict{String, Material} = Dict{String, Material}(),
world::World = World(),
camera::Union{Camera, Nothing} = nothing,
float_variables::Dict{String, Float64} = Dict{String, Float64}(),
string_variables::Dict{String, String} = Dict{String, String}(),
bool_variables::Dict{String, Bool} = Dict{String, Bool}(),
vector_variables::Dict{String,Vec} = Dict{String,Vec}(),
color_variables::Dict{String,RGB{Float32}} = Dict{String,RGB{Float32}}(),
pigment_variables::Dict{String,Pigment} = Dict{String,Pigment}(),
brdf_variables::Dict{String,BRDF} = Dict{String,BRDF}(),
transformation_variables::Dict{String,Transformation} = Dict{String,Transformation}(),
variable_names::Set{String} = Set{String}(),
overridden_variables::Set{String} = Set{String}(),
) = new(
materials,
world,
camera,
float_variables,
string_variables,
bool_variables,
vector_variables,
color_variables,
pigment_variables,
brdf_variables,
transformation_variables,
variable_names,
overridden_variables,
)
end
##########################################################################################92
# Consume one token and require it to be exactly the symbol `symbol`;
# throw `GrammarError` otherwise. (Documented on the `expect_symbol` docstring.)
function expect_symbol(inputstream::InputStream, symbol::String)
    token = read_token(inputstream)
    if (typeof(token.value) ≠ SymbolToken) || (token.value.symbol ≠ symbol)
        # Fixed typo in the user-facing error message ("insted" -> "instead"),
        # matching the wording of the Vector{String} method.
        throw(GrammarError(token.location, "got $(token) instead of $(symbol)"))
    end
end
# Consume one token; it must be one of the symbols in `vec_symbol`,
# and the matched symbol is returned.
function expect_symbol(inputstream::InputStream, vec_symbol::Vector{String})
    token = read_token(inputstream)
    is_symbol = typeof(token.value) == SymbolToken
    if !is_symbol || (token.value.symbol ∉ vec_symbol)
        throw(GrammarError(token.location, "got $(token) instead of $(vec_symbol)"))
    end
    return token.value.symbol
end
"""
expect_symbol(inputstream::InputStream, symbol::String)
expect_symbol(inputstream::InputStream, vec_symbol::Vector{String}) :: String
Read a token from `inputstream` and check that its type is `SymbolToken`
and its value is `symbol`(first method) or a value inside `vec_symbol`
(second method, and return it), throwing `GrammarError` otherwise.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`KeywordEnum`](@ref), [`SymbolToken`](@ref)
"""
expect_symbol
"""
expect_keywords(inputstream::InputStream, keywords::Vector{KeywordEnum}) :: KeywordEnum
Read a token from `inputstream` and check that its type is `KeywordToken`
and its value is one of the keywords in `keywords`, throwing `GrammarError` otherwise.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`KeywordEnum`](@ref), [`KeywordToken`](@ref)
"""
function expect_keywords(inputstream::InputStream, keywords::Vector{KeywordEnum})
token = read_token(inputstream)
if typeof(token.value) ≠ KeywordToken
throw(GrammarError(token.location, "expected a keyword instead of '$(token)' "))
end
if token.value.keyword ∉ keywords
throw(GrammarError(
token.location,
"expected one of the keywords $([x for x in keywords]) instead of '$(token)'"
))
end
return token.value.keyword
end
"""
expect_number(inputstream::InputStream, scene::Scene) :: Float64
Read a token from `inputstream` and check that its type is `LiteralNumberToken`
(i.e. a number) or `IdentifierToken` (i.e. a variable defined in `scene`),
throwing `GrammarError` otherwise.
Return the float64-parsed number or the identifier associated float64-parsed
number, respectively.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`Scene`](@ref), [`LiteralNumberToken`](@ref),
[`IdentifierToken`](@ref)
"""
function expect_number(inputstream::InputStream, scene::Scene, open::Bool=false)
token = read_token(inputstream)
result = ""
if typeof(token.value) == SymbolToken && token.value.symbol == "("
result *= "("*expect_number(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
token = read_token(inputstream)
end
if typeof(token.value) == SymbolToken && token.value.symbol == "-"
result *= "-"
token = read_token(inputstream)
end
while true
if (typeof(token.value) == SymbolToken) && (token.value.symbol ∈ OPERATIONS)
result *= token.value.symbol
elseif (typeof(token.value) == IdentifierToken) && (token.value.identifier ∈ keys(SYM_NUM))
result *= string(SYM_NUM[token.value.identifier])
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if (variable_name ∈ keys(scene.float_variables) )
next_number = scene.float_variables[variable_name]
result *= repr(next_number)
elseif isdefined(Raytracing, Symbol(variable_name)) || isdefined(Base, Symbol(variable_name))
unread_token(inputstream, token)
result *= parse_function(inputstream, scene)
else
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
elseif typeof(token.value) == LiteralNumberToken
result *= repr(token.value.number)
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol=="(")
result *= "("*expect_number(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
else
unread_token(inputstream, token)
break
end
#=
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol==")")
unread_token(inputstream, token)
break
else
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
=#
token = read_token(inputstream)
end
if open == true
return result
else
return eval(Meta.parse(result))
end
end
"""
expect_bool(inputstream::InputStream, scene::Scene) :: Bool
Read a token from `inputstream` and check that its type is `KeywordToken`
or `IdentifierToken` (i.e. a variable defined in `scene`),
throwing `GrammarError` otherwise.
Return the parsed bool or the identifier associated parsed bool, respectively.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`Scene`](@ref), [`KeywordToken`](@ref),
[`IdentifierToken`](@ref)
"""
function expect_bool(inputstream::InputStream, scene::Scene)
token = read_token(inputstream)
if typeof(token.value) == KeywordToken
unread_token(inputstream, token)
keyword = expect_keywords(inputstream, [ TRUE, FALSE])
if keyword == TRUE
return true
elseif keyword == FALSE
return false
else
throw(ArgumentError("how did you come here?"))
end
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
(variable_name ∈ keys(scene.bool_variables) ) ||
throw(GrammarError(token.location, "unknown bool variable '$(token)'"))
return scene.bool_variables[variable_name]
else
throw(GrammarError(token.location, "got '$(token)' instead of a bool variable"))
end
end
"""
expect_string(inputstream::InputStream, scene::Scene) :: String
Read a token from `inputstream` and check that its type is `StringToken`,
throwing `GrammarError` otherwise.
Return the string associated with the readed `StringToken`.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`Scene`](@ref), [`StringToken`](@ref),
"""
function expect_string(inputstream::InputStream, scene::Scene)
token = read_token(inputstream)
if typeof(token.value) == StringToken
return token.value.string
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if variable_name ∉ keys(scene.string_variables)
throw(GrammarError(token.location, "unknown string variable '$(token)'"))
end
return scene.string_variables[variable_name]
else
throw(GrammarError(token.location, "got $(token) instead of a string"))
end
end
"""
expect_identifier(inputstream::InputStream) :: String
Read a token from `inputstream` and check that it is an identifier.
Return the name of the identifier.
Read a token from `inputstream` and check that its type is `IdentifierToken`,
throwing `GrammarError` otherwise.
Return the name of the identifier as a `String`.
Call internally [`read_token`](@ref).
See also: [`InputStream`](@ref), [`Scene`](@ref), [`IdentifierToken`](@ref),
"""
function expect_identifier(inputstream::InputStream)
token = read_token(inputstream)
if (typeof(token.value) ≠ IdentifierToken)
throw(GrammarError(token.location, "got $(token) instead of an identifier"))
end
return token.value.identifier
end
##########################################################################################92
"""
parse_vector(inputstream::InputStream, scene::Scene) :: Vec
Parse a vector from the given `inputstream` and return it.
Call internally [`expect_number`](@ref) and [`expect_symbol`](@ref).
See also: [`InputStream`](@ref), [`Scene`](@ref), [`Vec`](@ref)
"""
function parse_vector(inputstream::InputStream, scene::Scene, open::Bool=false)
token = read_token(inputstream)
result = ""
if typeof(token.value) == SymbolToken && token.value.symbol == "("
result *= "("*parse_vector(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
token = read_token(inputstream)
end
if typeof(token.value) == SymbolToken && token.value.symbol == "-"
result *= "-"
token = read_token(inputstream)
end
while true
if (typeof(token.value) == SymbolToken) && (token.value.symbol ∈ OPERATIONS)
result *= token.value.symbol
elseif (typeof(token.value) == IdentifierToken) && (token.value.identifier ∈ keys(SYM_NUM))
result *= string(SYM_NUM[token.value.identifier])
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if (variable_name ∈ keys(scene.vector_variables) )
next_vector = scene.vector_variables[variable_name]
result *= repr(next_vector)
elseif (variable_name ∈ keys(scene.float_variables) )
next_number = scene.float_variables[variable_name]
result *= repr(next_number)
elseif isdefined(Raytracing, Symbol(variable_name)) || isdefined(Base, Symbol(variable_name))
unread_token(inputstream, token)
result *= parse_function(inputstream, scene)
else
throw(GrammarError(token.location, "unknown float/vector variable '$(token)'"))
end
elseif typeof(token.value) == SymbolToken && token.value.symbol =="["
unread_token(inputstream, token)
expect_symbol(inputstream, "[")
x = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
y = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
z = expect_number(inputstream, scene)
expect_symbol(inputstream, "]")
result*= repr(Vec(x, y, z))
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol=="(")
result *= "("*parse_vector(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
elseif typeof(token.value) == LiteralNumberToken
result *= repr(token.value.number)
else
unread_token(inputstream, token)
break
end
#=
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol==")")
unread_token(inputstream, token)
break
else
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
=#
token = read_token(inputstream)
end
if open == true
return result
else
return eval(Meta.parse(result))
end
end
#=
function parse_vector(inputstream::InputStream, scene::Scene)
token = read_token(inputstream)
if typeof(token.value) == SymbolToken
unread_token(inputstream, token)
expect_symbol(inputstream, "[")
x = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
y = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
z = expect_number(inputstream, scene)
expect_symbol(inputstream, "]")
return Vec(x, y, z)
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if variable_name ∉ keys(scene.vector_variables)
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
return scene.vector_variables[variable_name]
end
end
=#
"""
parse_color(inputstream::InputStream, scene::Scene) :: RGB{Float32}
Read the color from the given `inputstream` and return it.
Call internally ['expect_symbol'](@ref) and ['expect_number'](@ref).
See also: ['InputStream'](@ref), ['Scene'](@ref)
"""
function parse_color(inputstream::InputStream, scene::Scene, open::Bool=false)
token = read_token(inputstream)
result = ""
if typeof(token.value) == SymbolToken && token.value.symbol == "("
result *= "("*parse_vector(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
token = read_token(inputstream)
end
if typeof(token.value) == SymbolToken && token.value.symbol == "-"
result *= "-"
token = read_token(inputstream)
end
while true
if (typeof(token.value) == SymbolToken) && (token.value.symbol ∈ OPERATIONS)
result *= token.value.symbol
elseif (typeof(token.value) == IdentifierToken) && (token.value.identifier ∈ keys(SYM_NUM))
result *= string(SYM_NUM[token.value.identifier])
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if (variable_name ∈ keys(scene.color_variables) )
next_color = scene.color_variables[variable_name]
result *= repr(next_color)
elseif (variable_name ∈ keys(scene.float_variables) )
next_number = scene.float_variables[variable_name]
result *= repr(next_number)
elseif isdefined(Raytracing, Symbol(variable_name)) || isdefined(Base, Symbol(variable_name))
unread_token(inputstream, token)
result *= parse_function(inputstream, scene)
else
throw(GrammarError(token.location, "unknown float/color variable '$(token)'"))
end
elseif typeof(token.value) == SymbolToken && token.value.symbol =="<"
unread_token(inputstream, token)
expect_symbol(inputstream, "<")
x = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
y = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
z = expect_number(inputstream, scene)
expect_symbol(inputstream, ">")
result*= repr(RGB{Float32}(x, y, z))
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol=="(")
result *= "("*parse_color(inputstream, scene, true)
expect_symbol(inputstream, ")")
result *= ")"
elseif typeof(token.value) == LiteralNumberToken
result *= repr(token.value.number)
else
unread_token(inputstream, token)
break
end
#=
elseif (typeof(token.value) == SymbolToken) && (token.value.symbol==")")
unread_token(inputstream, token)
break
else
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
=#
token = read_token(inputstream)
end
if open == true
return result
else
return eval(Meta.parse(result))
end
end
#=
function parse_color(inputstream::InputStream, scene::Scene)
token = read_token(inputstream)
if typeof(token.value) == SymbolToken
unread_token(inputstream, token)
expect_symbol(inputstream, "<")
red = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
green = expect_number(inputstream, scene)
expect_symbol(inputstream, ",")
blue = expect_number(inputstream, scene)
expect_symbol(inputstream, ">")
return RGB{Float32}(red, green, blue)
elseif typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if variable_name ∉ keys(scene.color_variables)
throw(GrammarError(token.location, "unknown variable '$(token)'"))
end
return scene.color_variables[variable_name]
end
end
=#
"""
parse_pigment(inputstream::InputStream, scene::Scene) :: Pigment
Parse a pigment from the given `inputstream` and return it.
Call internally the following parsing functions:
- [`expect_keywords`](@ref)
- [`expect_symbol`](@ref)
- [`parse_color`](@ref)
- [`expect_number`](@ref)
- [`expect_string`](@ref)
Call internally the following functions and structs of the program:
- [`UniformPigment`](@ref)
- [`CheckeredPigment`](@ref)
- [`ImagePigment`](@ref)
- [`load_image`](@ref)
See also: [`InputStream`](@ref), [`Scene`](@ref), [`Pigment`](@ref)
"""
function parse_pigment(inputstream::InputStream, scene::Scene)
token = read_token(inputstream)
if typeof(token.value) == IdentifierToken
variable_name = token.value.identifier
if variable_name ∉ keys(scene.pigment_variables)
throw(GrammarError(token.location, "unknown pigment '$(token)'"))
end
return scene.pigment_variables[variable_name]
else
unread_token(inputstream, token)
end
keyword = expect_keywords(inputstream, [ UNIFORM, CHECKERED, IMAGE])
expect_symbol(inputstream, "(")
if keyword == UNIFORM
color = parse_color(inputstream, scene)
result = UniformPigment(color)
elseif keyword == CHECKERED
color1 = parse_color(inputstream, scene)
expect_symbol(inputstream, ",")
color2 = parse_color(inputstream, scene)
expect_symbol(inputstream, ",")
num_of_steps = Int(expect_number(inputstream, scene))
result = CheckeredPigment(color1, color2, num_of_steps)
elseif keyword == IMAGE
file_name = expect_string(inputstream, scene)
image = open(file_name, "r") do image_file; load_image(image_file); end
result = ImagePigment(image)
else
@assert false "This line should be unreachable"
end
expect_symbol(inputstream, ")")
return result
end
"""
    parse_brdf(inputstream::InputStream, scene::Scene) :: BRDF

Parse a BRDF from the given `inputstream` and return it.

A BRDF is either a previously declared variable (an identifier looked up in
`scene.brdf_variables`) or a literal of the form `diffuse(<pigment>)` or
`specular(<pigment>)`.

Call internally the following parsing functions:
- [`expect_keywords`](@ref)
- [`expect_symbol`](@ref)
- [`parse_pigment`](@ref)

Call internally the following functions and structs of the program:
- [`DiffuseBRDF`](@ref)
- [`SpecularBRDF`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`BRDF`](@ref)
"""
function parse_brdf(inputstream::InputStream, scene::Scene)
    token = read_token(inputstream)
    # Named BRDF: resolve the identifier against the declared BRDF variables.
    if token.value isa IdentifierToken
        varname = token.value.identifier
        varname ∈ keys(scene.brdf_variables) ||
            throw(GrammarError(token.location, "unknown BRDF '$(token)'"))
        return scene.brdf_variables[varname]
    end
    # Not an identifier: push the token back and parse a BRDF literal.
    unread_token(inputstream, token)
    brdf_kw = expect_keywords(inputstream, [ DIFFUSE, SPECULAR])
    expect_symbol(inputstream, "(")
    inner_pigment = parse_pigment(inputstream, scene)
    expect_symbol(inputstream, ")")
    brdf_kw == DIFFUSE && return DiffuseBRDF(inner_pigment)
    brdf_kw == SPECULAR && return SpecularBRDF(inner_pigment)
    @assert false "This line should be unreachable"
end
"""
    parse_material(inputstream::InputStream, scene::Scene) :: (String, Material)

Parse a material declaration `name(<brdf>, <emitted_radiance_pigment>)` from
the given `inputstream`; return the tuple `(name, Material)`.

Call internally the following parsing functions:
- [`expect_identifier`](@ref)
- [`expect_symbol`](@ref)
- [`parse_brdf`](@ref)
- [`parse_pigment`](@ref)

Call internally the following functions and structs of the program:
- [`Material`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Token`](@ref), [`Material`](@ref)
"""
function parse_material(inputstream::InputStream, scene::Scene)
    material_name = expect_identifier(inputstream)
    expect_symbol(inputstream, "(")
    material_brdf = parse_brdf(inputstream, scene)
    expect_symbol(inputstream, ",")
    radiance = parse_pigment(inputstream, scene)
    expect_symbol(inputstream, ")")
    return material_name, Material(material_brdf, radiance)
end
"""
    parse_transformation(inputstream::InputStream, scene::Scene) :: Transformation

Parse a Transformation from the given `inputstream` and return it.

A transformation is either a previously declared variable or a `*`-chained
sequence of the primitives `identity`, `translation(...)`, `rotation_x/y/z(...)`
and `scaling(...)`.

Call internally the following parsing functions:
- [`expect_keywords`](@ref)
- [`expect_symbol`](@ref)
- [`expect_number`](@ref)
- [`parse_vector`](@ref)
- [`read_token`](@ref)
- [`unread_token`](@ref)

Call internally the following functions and structs of the program:
- [`translation`](@ref)
- [`rotation_x`](@ref)
- [`rotation_y`](@ref)
- [`rotation_z`](@ref)
- [`scaling`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Transformation`](@ref)
"""
function parse_transformation(inputstream::InputStream, scene::Scene)
    token = read_token(inputstream)
    if typeof(token.value) == IdentifierToken
        variable_name = token.value.identifier
        if variable_name ∉ keys(scene.transformation_variables)
            # BUG FIX: this message used to say "unknown pigment" (copy-paste
            # from parse_pigment); the failing lookup here is a transformation.
            throw(GrammarError(token.location, "unknown transformation '$(token)'"))
        end
        return scene.transformation_variables[variable_name]
    else
        unread_token(inputstream, token)
    end
    # Accumulate the product of the chained primitive transformations.
    result = Transformation()
    while true
        transformation_kw = expect_keywords(inputstream, [
            IDENTITY,
            TRANSLATION,
            ROTATION_X,
            ROTATION_Y,
            ROTATION_Z,
            SCALING,
        ])
        if transformation_kw == IDENTITY
            nothing # Do nothing (this is a primitive form of optimization!)
        elseif transformation_kw == TRANSLATION
            expect_symbol(inputstream, "(")
            result *= translation(parse_vector(inputstream, scene))
            expect_symbol(inputstream, ")")
        elseif transformation_kw == ROTATION_X
            expect_symbol(inputstream, "(")
            result *= rotation_x(expect_number(inputstream, scene))
            expect_symbol(inputstream, ")")
        elseif transformation_kw == ROTATION_Y
            expect_symbol(inputstream, "(")
            result *= rotation_y(expect_number(inputstream, scene))
            expect_symbol(inputstream, ")")
        elseif transformation_kw == ROTATION_Z
            expect_symbol(inputstream, "(")
            result *= rotation_z(expect_number(inputstream, scene))
            expect_symbol(inputstream, ")")
        elseif transformation_kw == SCALING
            expect_symbol(inputstream, "(")
            result *= scaling(parse_vector(inputstream, scene))
            expect_symbol(inputstream, ")")
        end
        # We must peek the next token to check if there is another transformation that is being
        # chained or if the sequence ends. Thus, this is a LL(1) parser.
        next_kw = read_token(inputstream)
        if (typeof(next_kw.value) ≠ SymbolToken) || (next_kw.value.symbol ≠ "*")
            # Pretend you never read this token and put it back!
            unread_token(inputstream, next_kw)
            break
        end
    end
    return result
end
"""
    parse_camera(inputstream::InputStream, scene::Scene) :: Camera

Parse a Camera from the given `inputstream` and return it.

Syntax: `(perspective, <transformation>, <distance>)` or
`(orthogonal, <transformation>)`; both variants use an aspect ratio of 1.0.

Call internally the following parsing functions:
- [`expect_symbol`](@ref)
- [`expect_keywords`](@ref)
- [`expect_number`](@ref)
- [`parse_transformation`](@ref)

Call internally the following functions and structs of the program:
- [`OrthogonalCamera`](@ref)
- [`PerspectiveCamera`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Camera`](@ref)
"""
function parse_camera(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    type_kw = expect_keywords(inputstream, [ PERSPECTIVE, ORTHOGONAL])
    expect_symbol(inputstream, ",")
    transformation = parse_transformation(inputstream, scene)
    if type_kw == PERSPECTIVE
        expect_symbol(inputstream, ",")
        distance = expect_number(inputstream, scene)
        expect_symbol(inputstream, ")")
        result = PerspectiveCamera(distance, 1.0, transformation)
    elseif type_kw == ORTHOGONAL
        expect_symbol(inputstream, ")")
        result = OrthogonalCamera(1.0, transformation)
    else
        # CONSISTENCY FIX: guard the unreachable branch like the sibling
        # parsers do, so `result` can never be read undefined.
        @assert false "This line should be unreachable"
    end
    return result
end
##########################################################################################92
"""
    parse_pointlight(inputstream::InputStream, scene::Scene) :: PointLight

Parse a PointLight from the given `inputstream` and return it.

Syntax: `(<position_vector>, <color> [, <linear_radius>])`; the optional
linear radius defaults to `0.0` when omitted.

Call internally the following parsing functions:
- [`read_token`](@ref)
- [`unread_token`](@ref)
- [`expect_number`](@ref)
- [`expect_symbol`](@ref)
- [`parse_vector`](@ref)
- [`parse_color`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`PointLight`](@ref)
"""
function parse_pointlight(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    position = parse_vector(inputstream, scene)
    expect_symbol(inputstream, ",")
    light_color = parse_color(inputstream, scene)
    # Peek: a trailing "," introduces the optional linear radius.
    peeked = read_token(inputstream)
    if peeked.value isa SymbolToken && peeked.value.symbol == ","
        # The "," has already been consumed by the peek above.
        linear_radius = expect_number(inputstream, scene)
    else
        unread_token(inputstream, peeked)
        linear_radius = 0.0
    end
    expect_symbol(inputstream, ")")
    return PointLight(
        Point(position.x, position.y, position.z),
        light_color,
        linear_radius,
    )
end
"""
    parse_sphere(inputstream::InputStream, scene::Scene) :: Sphere

Parse a Sphere from the given `inputstream` and return it.

Throws `GrammarError` if the specified `Material` does not exist.

Syntax: `(material_name, <transformation> [, flag_pointlight, flag_background])`;
the two optional boolean flags default to `false` when omitted.

Call internally the following parsing functions:
- [`expect_symbol`](@ref)
- [`expect_identifier`](@ref)
- [`parse_transformation`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Sphere`](@ref)
[`Material`](@ref)
"""
function parse_sphere(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    material_name = expect_identifier(inputstream)
    if material_name ∉ keys(scene.materials)
        # We raise the exception here because inputstream is pointing to the end of the wrong identifier
        throw(GrammarError(inputstream.location, "unknown material $(material_name)"))
    end
    expect_symbol(inputstream, ",")
    transformation = parse_transformation(inputstream, scene)
    # Peek: a further "," introduces the two optional boolean flags.
    token = read_token(inputstream)
    if typeof(token.value) == SymbolToken && token.value.symbol == ","
        unread_token(inputstream, token)
        expect_symbol(inputstream, ",")
        flag_pointlight = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ",")
        flag_background = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ")")
    else
        unread_token(inputstream, token)
        expect_symbol(inputstream, ")")
        flag_pointlight = false
        flag_background = false
    end
    return Sphere(transformation, scene.materials[material_name], flag_pointlight, flag_background)
end
"""
    parse_plane(inputstream::InputStream, scene::Scene) :: Plane

Parse a Plane from the given `inputstream` and return it.

Throws `GrammarError` if the specified `Material` does not exist.

Syntax: `(material_name, <transformation> [, flag_pointlight, flag_background])`;
the two optional boolean flags default to `false` when omitted.

Call internally the following parsing functions:
- [`expect_symbol`](@ref)
- [`expect_identifier`](@ref)
- [`parse_transformation`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Plane`](@ref),
[`Material`](@ref)
"""
function parse_plane(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    material_name = expect_identifier(inputstream)
    if material_name ∉ keys(scene.materials)
        # We raise the exception here because inputstream is pointing to the end of the wrong identifier
        throw(GrammarError(inputstream.location, "unknown material $(material_name)"))
    end
    expect_symbol(inputstream, ",")
    transformation = parse_transformation(inputstream, scene)
    # Peek: a further "," introduces the two optional boolean flags.
    token = read_token(inputstream)
    if typeof(token.value) == SymbolToken && token.value.symbol == ","
        unread_token(inputstream, token)
        expect_symbol(inputstream, ",")
        flag_pointlight = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ",")
        flag_background = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ")")
    else
        unread_token(inputstream, token)
        expect_symbol(inputstream, ")")
        flag_pointlight = false
        flag_background = false
    end
    return Plane(transformation, scene.materials[material_name], flag_pointlight, flag_background)
end
"""
    parse_cube(inputstream::InputStream, scene::Scene) :: Cube

Parse a Cube from the given `inputstream` and return it.

Throws `GrammarError` if the specified `Material` does not exist.

Syntax: `(material_name, <transformation> [, flag_pointlight, flag_background])`;
the two optional boolean flags default to `false` when omitted.

Call internally the following parsing functions:
- [`expect_symbol`](@ref)
- [`expect_identifier`](@ref)
- [`parse_transformation`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Cube`](@ref)
[`Material`](@ref)
"""
function parse_cube(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    material_name = expect_identifier(inputstream)
    if material_name ∉ keys(scene.materials)
        # We raise the exception here because inputstream is pointing to the end of the wrong identifier
        throw(GrammarError(inputstream.location, "unknown material $(material_name)"))
    end
    expect_symbol(inputstream, ",")
    transformation = parse_transformation(inputstream, scene)
    # Peek: a further "," introduces the two optional boolean flags.
    token = read_token(inputstream)
    if typeof(token.value) == SymbolToken && token.value.symbol == ","
        unread_token(inputstream, token)
        expect_symbol(inputstream, ",")
        flag_pointlight = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ",")
        flag_background = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ")")
    else
        unread_token(inputstream, token)
        expect_symbol(inputstream, ")")
        flag_pointlight = false
        flag_background = false
    end
    return Cube(transformation, scene.materials[material_name], flag_pointlight, flag_background)
end
"""
    parse_triangle(inputstream::InputStream, scene::Scene) :: Triangle

Parse a Triangle from the given `inputstream` and return it.

Throws `GrammarError` if the specified `Material` does not exist.

Syntax: `(material_name, <p1>, <p2>, <p3> [, flag_pointlight, flag_background])`
where `p1`–`p3` are the vertex vectors; the two optional boolean flags default
to `false` when omitted.

Call internally the following parsing functions:
- [`expect_symbol`](@ref)
- [`expect_identifier`](@ref)
- [`parse_transformation`](@ref)

See also: [`InputStream`](@ref), [`Scene`](@ref), [`Triangle`](@ref)
[`Material`](@ref)
"""
function parse_triangle(inputstream::InputStream, scene::Scene)
    expect_symbol(inputstream, "(")
    material_name = expect_identifier(inputstream)
    if material_name ∉ keys(scene.materials)
        # We raise the exception here because inputstream is pointing to the end of the wrong identifier
        throw(GrammarError(inputstream.location, "unknown material $(material_name)"))
    end
    expect_symbol(inputstream, ",")
    p1 = parse_vector(inputstream, scene)
    expect_symbol(inputstream, ",")
    p2 = parse_vector(inputstream, scene)
    expect_symbol(inputstream, ",")
    p3 = parse_vector(inputstream, scene)
    # Peek: a further "," introduces the two optional boolean flags.
    token = read_token(inputstream)
    if typeof(token.value) == SymbolToken && token.value.symbol == ","
        unread_token(inputstream, token)
        expect_symbol(inputstream, ",")
        flag_pointlight = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ",")
        flag_background = expect_bool(inputstream, scene)
        expect_symbol(inputstream, ")")
    else
        unread_token(inputstream, token)
        expect_symbol(inputstream, ")")
        flag_pointlight = false
        flag_background = false
    end
    return Triangle(p1, p2, p3, scene.materials[material_name], flag_pointlight, flag_background)
end
|
proofpile-julia0005-42856 | {
"provenance": "014.jsonl.gz:242857"
} | struct SystemMatrix
    # COO-format assembly buffers: parallel arrays of row indices, column
    # indices and values; duplicates are summed later by `make_sparse`.
    # NOTE(review): untyped (`Any`) fields prevent specialization; these look
    # like Vector{Int}/Vector{Float64} — consider parametrizing the struct.
    rows::Any
    cols::Any
    vals::Any
    # Inner constructor enforces that the three buffers stay in lockstep.
    # NOTE(review): `@assert` may be disabled at higher optimization levels;
    # an ArgumentError would be a sturdier validation.
    function SystemMatrix(rows, cols, vals)
        @assert length(rows) == length(cols) == length(vals)
        new(rows, cols, vals)
    end
end
# Construct an empty SystemMatrix with no stored entries
# (empty Int index buffers and an empty Float64 value buffer).
function SystemMatrix()
    return SystemMatrix(Int[], Int[], Float64[])
end
# Compact one-line display: reports only the number of stored COO entries.
function Base.show(io::IO, sysmatrix::SystemMatrix)
    print(io, "SystemMatrix with $(length(sysmatrix.rows)) entries")
end
"""
    assemble!(matrix::SystemMatrix, rows, cols, vals)

Append the COO triplets (`rows`, `cols`, `vals`) to the assembly buffers.
All three inputs must have equal length.
"""
function assemble!(matrix::SystemMatrix, rows, cols, vals)
    @assert length(rows) == length(cols) == length(vals)
    append!(matrix.rows, rows)
    append!(matrix.cols, cols)
    append!(matrix.vals, vals)
end
# Right-hand-side assembly buffers: sparse (row index, value) pairs that are
# combined into a dense vector by `rhs`; duplicate rows are summed there.
struct SystemRHS
    # NOTE(review): untyped fields; these look like Vector{Int}/Vector{Float64}.
    rows::Any
    vals::Any
    # Inner constructor keeps the two buffers in lockstep.
    function SystemRHS(rows, vals)
        @assert length(rows) == length(vals)
        new(rows, vals)
    end
end
# Construct an empty SystemRHS with no stored entries.
function SystemRHS()
    return SystemRHS(Int[], Float64[])
end
# Compact one-line display: reports only the number of stored entries.
function Base.show(io::IO, sysrhs::SystemRHS)
    print(io, "SystemRHS with $(length(sysrhs.rows)) entries")
end
"""
    assemble!(systemrhs::SystemRHS, rows, vals)

Append the (row index, value) pairs to the RHS assembly buffers.
Both inputs must have equal length.
"""
function assemble!(systemrhs::SystemRHS, rows, vals)
    @assert length(rows) == length(vals)
    append!(systemrhs.rows, rows)
    append!(systemrhs.vals, vals)
end
"""
    node_to_dof_id(nodeid, dof, dofspernode)

Return the global (1-based) degree-of-freedom index of local dof `dof` of node
`nodeid`, with `dofspernode` consecutive dofs stored per node.
"""
node_to_dof_id(nodeid, dof, dofspernode) = (nodeid - 1) * dofspernode + dof
"""
    element_dofs(nodeids, dofspernode)

Return the global dof indices of all nodes in `nodeids`, node-major: the
`dofspernode` dofs of each node appear consecutively, in node order.
"""
function element_dofs(nodeids, dofspernode)
    edofs = Vector{Int}(undef, length(nodeids) * dofspernode)
    k = 0
    for nodeid in nodeids, dof in 1:dofspernode
        k += 1
        edofs[k] = (nodeid - 1) * dofspernode + dof
    end
    return edofs
end
"""
    element_dofs_to_operator_dofs(rowdofs, coldofs)

Expand element row/column dof lists into the COO index pattern of the dense
element matrix stored column-major: `rows` cycles through `rowdofs` once per
column, `cols` repeats each entry of `coldofs` `length(rowdofs)` times.
"""
function element_dofs_to_operator_dofs(rowdofs, coldofs)
    rows = [r for _ in coldofs for r in rowdofs]
    cols = [c for c in coldofs for _ in rowdofs]
    return rows, cols
end
# Assemble a coupling block between the dofs of two node sets: row dofs come
# from `nodeids1`, column dofs from `nodeids2`; `vals` is the vectorized
# (column-major) element matrix.
function assemble_couple_cell_matrix!(sysmatrix, nodeids1, nodeids2, dofspernode, vals)
    edofs1 = element_dofs(nodeids1, dofspernode)
    edofs2 = element_dofs(nodeids2, dofspernode)
    rows, cols = element_dofs_to_operator_dofs(edofs1, edofs2)
    assemble!(sysmatrix, rows, cols, vals)
end
# Assemble a square element matrix (vectorized column-major in `vals`) whose
# rows and columns both correspond to the dofs of `nodeids`.
function assemble_cell_matrix!(sysmatrix::SystemMatrix, nodeids, dofspernode, vals)
    edofs = element_dofs(nodeids, dofspernode)
    rows, cols = element_dofs_to_operator_dofs(edofs, edofs)
    assemble!(sysmatrix, rows, cols, vals)
end
"""
    assemble_bilinear_form!(sysmatrix, cellmatrix, nodalconnectivity, dofspernode)

Assemble the same element matrix `cellmatrix` into `sysmatrix` for every cell
of `nodalconnectivity` (one column of node ids per cell).
"""
function assemble_bilinear_form!(
    sysmatrix::SystemMatrix,
    cellmatrix,
    nodalconnectivity,
    dofspernode,
)
    ncells = size(nodalconnectivity)[2]
    # The element matrix is identical for every cell, so vectorize it once.
    vals = vec(cellmatrix)
    for cellid = 1:ncells
        nodeids = nodalconnectivity[:, cellid]
        assemble_cell_matrix!(sysmatrix, nodeids, dofspernode, vals)
    end
end
# Mesh convenience overload: pulls dofs-per-node (= spatial dimension) and the
# nodal connectivity from `mesh`, then delegates to the array-based method.
function assemble_bilinear_form!(sysmatrix::SystemMatrix, cellmatrix, mesh::Mesh)
    dofspernode = dimension(mesh)
    nodalconnectivity = nodal_connectivity(mesh)
    assemble_bilinear_form!(sysmatrix, cellmatrix, nodalconnectivity, dofspernode)
end
# Scatter one element load vector `vals` into the system RHS at the dofs of
# `nodeids`.
function assemble_cell_rhs!(sysrhs, nodeids, dofspernode, vals)
    rows = element_dofs(nodeids, dofspernode)
    assemble!(sysrhs, rows, vals)
end
# Assemble the body-force contribution: for each cell, compute the local load
# vector with `linear_form(rhsfunc, ...)` and scatter it into `systemrhs`.
function assemble_body_force_linear_form!(
    systemrhs,
    rhsfunc,
    basis,
    quad,
    cellmaps,
    nodalconnectivity,
)
    ncells = length(cellmaps)
    nf = number_of_basis_functions(basis)
    dim = dimension(basis)
    # Connectivity must list one node per basis function per cell.
    @assert size(nodalconnectivity) == (nf, ncells)
    for (idx, cellmap) in enumerate(cellmaps)
        rhs = linear_form(rhsfunc, basis, quad, cellmap)
        edofs = element_dofs(nodalconnectivity[:, idx], dim)
        assemble!(systemrhs, edofs, rhs)
    end
end
# Mesh convenience overload: extracts cell maps and connectivity from `mesh`
# and delegates to the array-based method.
function assemble_body_force_linear_form!(systemrhs, rhsfunc, basis, quad, mesh::Mesh)
    cellmaps = cell_maps(mesh)
    nodalconnectivity = nodal_connectivity(mesh)
    assemble_body_force_linear_form!(
        systemrhs,
        rhsfunc,
        basis,
        quad,
        cellmaps,
        nodalconnectivity,
    )
end
# Assemble traction boundary terms: walk boundary cells, and for each exterior
# face (neighbor id 0) whose reference midpoint maps into the traction
# boundary (per `istractionboundary`), add the face load vector to the RHS.
function assemble_traction_force_linear_form!(
    systemrhs,
    tractionfunc,
    basis,
    facequads,
    cellmaps,
    nodalconnectivity,
    cellconnectivity,
    istractionboundary,
)
    dim = dimension(basis)
    refmidpoints = reference_face_midpoints()
    isboundarycell = is_boundary_cell(cellconnectivity)
    cellids = findall(isboundarycell)
    # NOTE(review): the face Jacobian is taken from cellmaps[1] only — this
    # assumes all cells share the same geometry (uniform mesh); confirm.
    facedetjac = face_determinant_jacobian(cellmaps[1])
    for cellid in cellids
        cellmap = cellmaps[cellid]
        for (faceid, nbrcellid) in enumerate(cellconnectivity[:, cellid])
            if nbrcellid == 0
                if istractionboundary(cellmap(refmidpoints[faceid]))
                    rhs = linear_form(
                        tractionfunc,
                        basis,
                        facequads[faceid],
                        cellmap,
                        facedetjac[faceid],
                    )
                    edofs = element_dofs(nodalconnectivity[:, cellid], dim)
                    assemble!(systemrhs, edofs, rhs)
                end
            end
        end
    end
end
# Same as `assemble_traction_force_linear_form!`, but assembles only a single
# vector `component` of the traction via `component_linear_form`.
function assemble_traction_force_component_linear_form!(
    systemrhs,
    tractionfunc,
    basis,
    facequads,
    cellmaps,
    nodalconnectivity,
    cellconnectivity,
    istractionboundary,
    component,
)
    dim = dimension(basis)
    refmidpoints = reference_face_midpoints()
    isboundarycell = is_boundary_cell(cellconnectivity)
    cellids = findall(isboundarycell)
    # NOTE(review): face Jacobian from cellmaps[1] — assumes uniform cell
    # geometry, same as the non-component variant.
    facedetjac = face_determinant_jacobian(cellmaps[1])
    for cellid in cellids
        cellmap = cellmaps[cellid]
        for (faceid, nbrcellid) in enumerate(cellconnectivity[:, cellid])
            if nbrcellid == 0
                if istractionboundary(cellmap(refmidpoints[faceid]))
                    rhs = component_linear_form(
                        tractionfunc,
                        basis,
                        facequads[faceid],
                        component,
                        cellmap,
                        facedetjac[faceid],
                    )
                    edofs = element_dofs(nodalconnectivity[:, cellid], dim)
                    assemble!(systemrhs, edofs, rhs)
                end
            end
        end
    end
end
# Assemble the stress right-hand side: for each cell, gather its displacement
# dofs, evaluate `stress_cell_rhs`, and scatter the result using the
# symmetric-stress dof numbering (sdim dofs per node).
function assemble_stress_linear_form!(
    systemrhs,
    basis,
    quad,
    stiffness,
    nodaldisplacement,
    nodalconnectivity,
    jacobian,
)
    dim = dimension(basis)
    sdim = number_of_symmetric_degrees_of_freedom(dim)
    nf, ncells = size(nodalconnectivity)
    for cellid = 1:ncells
        nodeids = nodalconnectivity[:, cellid]
        # Displacement dofs use `dim` entries per node ...
        elementdofs = element_dofs(nodeids, dim)
        celldisp = nodaldisplacement[elementdofs]
        rhs = stress_cell_rhs(basis, quad, stiffness, celldisp, jacobian)
        # ... while the assembled stress vector uses `sdim` entries per node.
        stressdofs = element_dofs(nodeids, sdim)
        assemble!(systemrhs, stressdofs, rhs)
    end
end
"""
    make_sparse(sysmatrix::SystemMatrix, ndofs::Int)

Convert the accumulated COO triplets into an `ndofs`×`ndofs` sparse matrix;
`sparse` sums duplicate (row, col) entries and `dropzeros!` removes stored zeros.
"""
function make_sparse(sysmatrix::SystemMatrix, ndofs::Int)
    return dropzeros!(sparse(sysmatrix.rows, sysmatrix.cols, sysmatrix.vals, ndofs, ndofs))
end
# Mesh convenience overload: sizes the sparse matrix by the mesh's total dofs.
function make_sparse(sysmatrix::SystemMatrix, mesh)
    totaldofs = number_of_degrees_of_freedom(mesh)
    return make_sparse(sysmatrix, totaldofs)
end
# Build the sparse operator for stress unknowns: sdim (symmetric stress
# components) dofs per node instead of the displacement dof count.
function make_sparse_stress_operator(sysmatrix, mesh)
    dim = dimension(mesh)
    sdim = number_of_symmetric_degrees_of_freedom(dim)
    numnodes = number_of_nodes(mesh)
    totaldofs = sdim * numnodes
    return make_sparse(sysmatrix, totaldofs)
end
"""
    rhs(sysrhs::SystemRHS, ndofs::Int)

Combine the accumulated (row, value) pairs into a dense length-`ndofs` vector;
`sparsevec` sums values assembled into the same row.
"""
function rhs(sysrhs::SystemRHS, ndofs::Int)
    return Array(sparsevec(sysrhs.rows, sysrhs.vals, ndofs))
end
# Mesh convenience overload: sizes the RHS vector by the mesh's total dofs.
function rhs(sysrhs::SystemRHS, mesh)
    totaldofs = number_of_degrees_of_freedom(mesh)
    return rhs(sysrhs, totaldofs)
end
# RHS vector for the stress system: sized with sdim dofs per node, matching
# `make_sparse_stress_operator`.
function stress_rhs(sysrhs, mesh)
    dim = dimension(mesh)
    sdim = number_of_symmetric_degrees_of_freedom(dim)
    numnodes = number_of_nodes(mesh)
    totaldofs = sdim * numnodes
    return rhs(sysrhs, totaldofs)
end
|
proofpile-julia0005-42857 | {
"provenance": "014.jsonl.gz:242858"
module GameDataManager

using Printf
using REPL.TerminalMenus
using JSON, JSONPointer, JSONSchema
using XLSXasJSON
using OrderedCollections
using PrettyTables

# Public entry points re-exported from the included files below.
export init_project, xl, xlookup

# Implementation is split across topic files; order matters only insofar as
# later files may use definitions from earlier ones.
include("abstractmeta.jl")
include("config.jl")
include("tables.jl")
include("init.jl")
include("setup.jl")
include("exporter.jl")
include("localizer.jl")
include("schema.jl")
include("show.jl")
include("utils.jl")

end # module
|
proofpile-julia0005-42858 | {
"provenance": "014.jsonl.gz:242859"
} | using Hooks
using Test
@testset "Hooks.jl" begin
@testset "Basic Notification" begin
run = false
handle(hook"test") do
run = true
end
notify(hook"test")
@test run
reset(hook"test")
end
@testset "Multiple handlers" begin
run1 = false
run2 = false
handle(hook"test") do
run1 = true
end
handle(hook"test") do
run2 = true
end
notify(hook"test")
@test run1
@test run2
reset(hook"test")
end
@testset "Hook Resetting" begin
run1 = false
run2 = false
handle(hook"test") do
run1 = true
end
handle(hook"test") do
run2 = true
end
reset(hook"test")
notify(hook"test")
@test !run1
@test !run2
reset(hook"test")
end
@testset "Reverse order registration" begin
run1 = false
run2 = false
notify(hook"foobar")
handle(hook"foobar") do
run1 = true
end
handle(hook"foobar") do
run2 = true
end
@test run1
@test run2
reset(hook"foobar")
end
@testset "Deregistration" begin
run1 = false
run2 = false
h1 = handle(hook"test") do
run1 = true
end
h2 = handle(hook"test") do
run2 = true
end
unhandle(h1, hook"test")
notify(hook"test")
@test !run1
@test run2
reset(hook"test")
end
@testset "Multiple Hooks" begin
run1 = false
run2 = false
h1 = handle(hook"test1") do
run1 = true
end
h2 = handle(hook"test2") do
run2 = true
end
notify(hook"test1")
notify(hook"test2")
@test run1
@test run2
reset(hook"test1")
reset(hook"test2")
end
@testset "Errors on resettinging nonexistent hook" begin
@test_throws ErrorException reset(hook"does-not-exist")
end
@testset "Errors on unhandling nonexistent hook" begin
@test_throws ErrorException unhandle(()->100, hook"does-not-exist")
end
@testset "Errors on unhandling nonexistent handler" begin
handle(()->42, hook"test")
@test_throws ErrorException unhandle(()->100, hook"test")
reset(hook"test")
end
@testset "Duplicate Handlers are merged" begin
called = 0
function duphandle()
called += 1
end
handle(duphandle, hook"test")
handle(duphandle, hook"test")
notify(hook"test")
@test called == 1
reset(hook"test")
end
@testset "Handlers can take arguments" begin
args1 = nothing
args2 = nothing
handle(hook"test") do x, y
args1 = (x,y)
end
notify(hook"test", 100, 200)
# also test the handlers get called when registered afterwards
handle(hook"test") do x, y
args2 = (x,y)
end
@test args1 == (100, 200)
@test args2 == (100, 200)
reset(hook"test")
end
end
|
proofpile-julia0005-42859 | {
"provenance": "014.jsonl.gz:242860"
} | # MIPLearn: Extensible Framework for Learning-Enhanced Mixed-Integer Optimization
# Copyright (C) 2020-2021, UChicago Argonne, LLC. All rights reserved.
# Released under the modified BSD license. See COPYING.md for more details.
function init_miplearn_ext(model)::Dict
if :miplearn ∉ keys(model.ext)
model.ext[:miplearn] = Dict()
model.ext[:miplearn]["instance_features"] = [0.0]
model.ext[:miplearn]["variable_features"] = Dict{AbstractString,Vector{Float64}}()
model.ext[:miplearn]["variable_categories"] = Dict{AbstractString,String}()
model.ext[:miplearn]["constraint_features"] = Dict{AbstractString,Vector{Float64}}()
model.ext[:miplearn]["constraint_categories"] = Dict{AbstractString,String}()
end
return model.ext[:miplearn]
end
"""
    set_features!(m::Model, f::Array{Float64})::Nothing

Store the instance-level feature vector `f` in the model's MIPLearn
extension dictionary (under `"instance_features"`).
"""
function set_features!(m::Model, f::Array{Float64})::Nothing
    ext = init_miplearn_ext(m)
    ext["instance_features"] = f
    return
end
"""
    set_features!(v::VariableRef, f::Array{Float64})::Nothing

Store the feature vector `f` for the (named) variable `v`, keyed by its name.
Throws if the variable has no name (see `_get_and_check_name`).
"""
function set_features!(v::VariableRef, f::Array{Float64})::Nothing
    ext = init_miplearn_ext(v.model)
    n = _get_and_check_name(v)
    ext["variable_features"][n] = f
    return
end
"""
    set_category!(v::VariableRef, category::String)::Nothing

Assign `category` to the (named) variable `v`, keyed by its name.
"""
function set_category!(v::VariableRef, category::String)::Nothing
    ext = init_miplearn_ext(v.model)
    n = _get_and_check_name(v)
    ext["variable_categories"][n] = category
    return
end
"""
    set_features!(c::ConstraintRef, f::Array{Float64})::Nothing

Store the feature vector `f` for the (named) constraint `c`, keyed by its name.
"""
function set_features!(c::ConstraintRef, f::Array{Float64})::Nothing
    ext = init_miplearn_ext(c.model)
    n = _get_and_check_name(c)
    ext["constraint_features"][n] = f
    return
end
"""
    set_category!(c::ConstraintRef, category::String)::Nothing

Assign `category` to the (named) constraint `c`, keyed by its name.
"""
function set_category!(c::ConstraintRef, category::String)::Nothing
    ext = init_miplearn_ext(c.model)
    n = _get_and_check_name(c)
    ext["constraint_categories"][n] = category
    return
end
"""
    set_lazy_callback!(model::Model, find_cb::Function, enforce_cb::Function)::Nothing

Register the lazy-constraint callbacks in the model's MIPLearn extension:
`find_cb` locates violated lazy constraints, `enforce_cb` adds them.
"""
function set_lazy_callback!(model::Model, find_cb::Function, enforce_cb::Function)::Nothing
    ext = init_miplearn_ext(model)
    ext["lazy_find_cb"] = find_cb
    ext["lazy_enforce_cb"] = enforce_cb
    return
end
# Convenience macro: `@feature obj feats` expands to `set_features!(obj, feats)`.
macro feature(obj, features)
    quote
        set_features!($(esc(obj)), $(esc(features)))
    end
end
# Convenience macro: `@category obj cat` expands to `set_category!(obj, cat)`.
macro category(obj, category)
    quote
        set_category!($(esc(obj)), $(esc(category)))
    end
end
# Convenience macro: `@lazycb model find enforce` expands to
# `set_lazy_callback!(model, find, enforce)`.
macro lazycb(obj, find_cb, enforce_cb)
    quote
        set_lazy_callback!($(esc(obj)), $(esc(find_cb)), $(esc(enforce_cb)))
    end
end
# Return the name of a variable/constraint, erroring on unnamed elements —
# features and categories are keyed by name, so a name is mandatory.
function _get_and_check_name(obj)
    n = name(obj)
    length(n) > 0 || error(
        "Features and categories can only be assigned to variables and " *
        "constraints that have names. Unnamed model element detected.",
    )
    return n
end
export @feature, @category, @lazycb
|
proofpile-julia0005-42860 | {
"provenance": "014.jsonl.gz:242861"
} | const tf = TensorFlow
function _make_infeed_op(sess, eltypes, sizes, inputs; device=nothing, device_ordinal=0)
desc = tf.NodeDescription(tf_graph(sess), "InfeedEnqueueTuple", tf.get_name("InfeedEnqueueTuple"))
tf.set_attr_shape_list(desc, "shapes", Vector{Int64}[collect(x) for x in sizes])
tf.set_attr_list(desc, "dtypes", DataType[eltypes...])
desc["device_ordinal"] = device_ordinal
tf.add_input(desc, inputs)
if device !== nothing
tf.set_device(desc, device)
end
eq = tf.Tensor(tf.Operation(desc));
eq
end
# Build an infeed-enqueue op fed through placeholders: returns the op plus a
# feed dict mapping each placeholder to the corresponding array in `tup`.
function make_infeed_op(sess, tup::NTuple{N, AbstractArray} where N; device=nothing, device_ordinal=0)
    placeholders = [tf.placeholder(eltype(el), shape=size(el)) for el in tup]
    eq = _make_infeed_op(sess, map(eltype, tup), map(size, tup), placeholders; device=device, device_ordinal=device_ordinal)
    feeds = Dict((x=>y for (x, y) in zip(placeholders, tup))...)
    eq, feeds
end
# Build an "OutfeedDequeueTuple" op whose shapes/dtypes are taken from the
# *type* `tup` (an NTuple type of static-shape array types).
function make_outfeed_op(sess, tup::Type{<:NTuple}; device=nothing, device_ordinal=0)
    desc = tf.NodeDescription(tf_graph(sess), "OutfeedDequeueTuple", tf.get_name("OutfeedDequeueTuple"))
    tf.set_attr_shape_list(desc, "shapes", Vector{Int64}[collect(size(x)) for x in tup.parameters])
    tf.set_attr_list(desc, "dtypes", DataType[eltype(x) for x in tup.parameters])
    desc["device_ordinal"] = device_ordinal
    if device !== nothing
        tf.set_device(desc, device)
    end
    eq = tf.Tensor(tf.Operation(desc));
    eq
end
# Enqueue the arrays in `tup` onto the infeed queue (builds the op and runs it).
function infeed(sess, tup::NTuple{N, AbstractArray} where N; device=nothing)
    eq, feeds = make_infeed_op(sess, tup; device=device)
    run(sess, eq, feeds)
end
# Dequeue one tuple (with shapes/dtypes described by the type `tup`) from the
# outfeed queue.
function outfeed(sess, tup::Type{<:NTuple}; device=nothing)
    eq = make_outfeed_op(sess, tup; device=device)
    run(sess, eq)
end
# Run an infeed enqueue and an outfeed dequeue in a single session call;
# returns only the dequeued outfeed tuple (the second fetched result).
function infeed_and_outfeed(sess, infeed_tup::NTuple{N, AbstractArray} where N,
                            outfeed_tup::Type{<:NTuple}; device=nothing)
    eq_infeed, feeds = make_infeed_op(sess, infeed_tup; device=device)
    eq_outfeed = make_outfeed_op(sess, outfeed_tup; device=device)
    run(sess, [eq_infeed, eq_outfeed], feeds)[2]
end
|
proofpile-julia0005-42861 | {
"provenance": "014.jsonl.gz:242862"
} | using Pkg;
using Flux;
using DifferentialEquations;
using DiffEqSensitivity;
using StochasticDiffEq;
using Zygote;
using LinearAlgebra;
using Optim;
using Plots;
using Serialization;
using PyCall;
Pkg.activate("./");
Pkg.instantiate();
include("learningProb.jl");
include("mleLearning.jl");
include("postProcess.jl");
# for this test args will be ignored
"""
    generateFullTetradLearningProblem(args)

Build and return a `LearningProblem` for the tetrad velocity-gradient model:
loads the binary DNS tensors via PyCall/NumPy, sets up a Flux MLP drift
closure, a fixed HIT noise-forcing diffusion matrix, and a debiased MLE-style
loss with hand-written gradients.  `args` is currently ignored (see the
comment above this function).
"""
function generateFullTetradLearningProblem(args)
    #--------------------begin problem setup--------------------#
    vgtDataPath = "/groups/chertkov/data/tetradData/InertialRangeData/aij_1024_gaus8_tetrad.bin";
    rhoDataPath = "/groups/chertkov/data/tetradData/InertialRangeData/roij_1024_gaus8_tetrad.bin";
    # Load the raw binary data with NumPy; shapes are (trajectories, steps, 3, 3).
    py"""
import numpy as np
mij = np.fromfile(file=$vgtDataPath, dtype=float)
mij = mij.reshape([32768, 1500, 3, 3])
roij = np.fromfile(file=$rhoDataPath, dtype=float)
roij = roij.reshape([32768, 1500, 3, 3])
"""
    roij = PyArray(py"roij"o);
    mij = PyArray(py"mij"o);
    # Flatten the 3x3 tensors to 9-vectors and stack [rho; M] per state (18 dims).
    roij = reshape(roij, (32768, 1500, 9));
    mij = reshape(mij, (32768, 1500, 9));
    trainingData = cat(roij,mij,dims=3);
    trainingData = permutedims(trainingData, (3, 2, 1));
    trainingData = trainingData[:,1:200,:]; #only look at first 200 timesteps
    initialConditions = trainingData[:,1,:];
    #split into training/test
    # NOTE(review): both "splits" currently alias the full data set.
    validationData = trainingData[:,:,:];
    trainingData = trainingData[:,:,:];
    numDims, numSteps, numTrajs = size(trainingData);
    dt = 4e-4;
    tsteps = Array{Float64, 1}(range(0.0, step=dt, length=numSteps));
    x,y = parseTrainingData(trainingData, dt);
    miniBatchSize = 1000; #see HyperParameterTuning for justification
    maxiters = 5000; #plateau's around 4k
    #optimizer = Flux.Optimise.Optimiser(ExpDecay(0.001, 0.5, 1000, 1e-7), ADAM(0.001));
    optimizer = ADAM(0.01);
    # see https://fluxml.ai/Flux.jl/stable/training/optimisers/
    hiddenDim = 80;
    # Destructure the MLP so its weights become a flat parameter vector θ_f.
    θ_f, drift_nn = Flux.destructure(Chain(Dense(18,hiddenDim,tanh),
                                           Dense(hiddenDim,hiddenDim,tanh),
                                           Dense(hiddenDim,9)));
    # Identity encode/decode: the model is learned in the raw state space.
    function encode(u,p,args...)
        return u;
    end
    function decode(u,p,args...)
        return u;
    end
    # Ground-truth drift/diffusion are unknown for this data set.
    function trueDrift(u,p,t)
        return nothing;
    end
    function truePrecisionMat(u,p,t)
        return nothing;
    end
    function trueDiff(u, p, t)
        return nothing;
    end
    function parameterizedDriftClosure(u,p)
        return drift_nn(p)(u);
    end
    function parameterizedPrecisionMat(u,p)
        matPiece = p[1:9];
        biasPiece = p[10:end];
        prescribedMat = (matPiece .* (u)).^2 + biasPiece;
        return prescribedMat;
    end
    # Partition the flat parameter vector into drift and diffusion segments.
    driftLength = length(θ_f);
    diffLength = 0;
    driftStart = 1;
    if (driftLength > 0)
        driftEnd = driftStart + (driftLength-1);
        diffStart = driftEnd+1;
    else
        driftEnd = 0;
        diffStart = 1;
    end
    diffEnd = diffStart + (diffLength-1);
    # Drift of the M-block: -M^2 plus the learned closure, per sample column.
    function modelDrift(u,p,t)
        numSamples = size(u)[end];
        ρ = [reshape(u[1:9,i], (3,3)) for i in 1:numSamples];
        M = [reshape(u[10:18,i], (3,3)) for i in 1:numSamples];
        closure = parameterizedDriftClosure(u,p);
        placeholder = [ reshape(-M[i]^2 + reshape(closure[:,i],(3,3)), 9) for i in 1:numSamples];
        return hcat(placeholder...);
    end
    function fullSystemDrift(u,p,t)
        return modelDrift(u,p,t);
    end
    # Noise-rate tensor for homogeneous isotropic turbulence forcing with
    # antisymmetric/symmetric amplitudes D_a, D_s.
    function createNoiseRateForcingHIT(D_a, D_s)
        c = zeros(3,3,3,3);
        delta(i,j) = (i==j) ? 1 : 0;
        for i in 1:3
            for j in 1:3
                for k in 1:3
                    for l in 1:3
                        term1 = -(1/3)*sqrt(D_s/5)*delta(i,j)*delta(k,l);
                        term2 = (1/2)*(sqrt(D_s/5) + sqrt(D_a/3))*delta(i,k)*delta(j,l);
                        term3 = (1/2)*(sqrt(D_s/5) - sqrt(D_a/3))*delta(i,l)*delta(j,k);
                        c[i,j,k,l] = term1 + term2 + term3;
                    end
                end
            end
        end
        return c;
    end
    c = reshape(createNoiseRateForcingHIT(15,15), (9,9));
    # Block-diagonal diffusion: identity on the rho block, HIT forcing on M.
    function modelDiff(u,p,t)
        return [Matrix(I,9,9) zeros(9,9)
                zeros(9,9) c];
    end
    #statically allocate for training
    driftGrad = zeros(driftLength);
    driftGradHolder = zeros(driftLength, miniBatchSize);
    predictionErrorGrad = zeros(diffLength);
    predictionErrorGradHolder = zeros(diffLength, miniBatchSize);
    detPiece = zeros(diffLength);
    detPieceHolder = zeros(diffLength, miniBatchSize);
    precisionMat = zeros(9,miniBatchSize);
    lossPerStep = zeros(miniBatchSize);
    debiasWeight = zeros(miniBatchSize);
    # NOTE(review): `prob` is a global that is not defined inside this
    # function (nor passed in); predict_ will fail unless a global `prob`
    # exists when it is called — confirm intended.
    function predict_(x, p)
        #assume autonomous
        return prob.modelDrift_(x, p, 0.0);
    end
    # Loss = debias-weighted squared drift residual; gradients are assembled
    # manually via Zygote pullbacks rather than differentiating the loss.
    function lossAndGrads(x,y,p,_drift,_precisionMat)
        eps = 64.0; #approximately median(\dot M)
        δ = _drift(x,p,0.0) .- y[10:18,:];
        for i in 1:miniBatchSize
            debiasWeight[i] = 1.0/(norm(reshape(y[10:18,i], (3,3)))^2 + eps);
            lossPerStep[i] = debiasWeight[i]*dot(δ[:,i],δ[:,i]);
        end
        sumLoss = (dt*eps/miniBatchSize)*sum(lossPerStep);
        #-----------------------------drift grad-----------------------------#
        if (driftLength > 0)
            #x is state variable, w is weight vector, pullback(p->f(x,p), p)[2](w)[1] performs the tensor contraction
            # (∂/∂p_i f_k) w^k
            ∂f(x,w) = pullback(p->parameterizedDriftClosure(x,p), p[driftStart:driftEnd])[2](w)[1];
            Threads.@threads for i in 1:miniBatchSize
                driftGradHolder[:,i] .= ∂f(x[:,i], debiasWeight[i]*δ[:,i]);
            end
            # NOTE(review): this rebinds `driftGrad` to a freshly allocated
            # Matrix each call, so the preallocated vector above is unused.
            driftGrad = ((2*dt*eps)/miniBatchSize)*sum(driftGradHolder, dims=2);
        end
        #-----------------------------diff grad-----------------------------#
        diffGrad = zeros(diffLength);
        if (diffLength > 0)
            #x is state variable, w is weight vector, pullback(p->f(x,p), p)[2](w)[1] performs the tensor contraction
            # (∂/∂p_i f_k) w^k
            ∂Π(x,w) = pullback(p_->_precisionMat(x,p_), p[diffStart:diffEnd])[2](w)[1];
            #Threads.@threads for i in 1:miniBatchSize
            for i in 1:miniBatchSize
                predictionErrorGradHolder[:,i] .= ∂Π(x[:,i], abs2.(δ[:,i]));
                detPieceHolder[:,i] .= ∂Π(x[:,i], precisionMat[:,i].^(-1));
            end
            predictionErrorGrad = dt*sum(predictionErrorGradHolder, dims=2);
            detPiece = sum(detPieceHolder, dims=2);
            diffGrad = (predictionErrorGrad - detPiece)/miniBatchSize;
            #add in bias regularization gradient piece:
            # NOTE(review): biasRegularizationOffset/Weight are not defined in
            # this function; they must exist as globals when diffLength > 0.
            regularizationGrad = zeros(18);
            for i in 1:size(regularizationGrad)[1]
                if ((-p[diffStart+17+i] + biasRegularizationOffset) > 0)
                    regularizationGrad[i] = -1.0;
                end
            end
            diffGrad[19:36] .+= biasRegularizationWeight*regularizationGrad;
        end
        grads = vcat(driftGrad, diffGrad);
        grads = reshape(grads, size(grads)[1]);
        return sumLoss,grads;
    end
    #covariance matrix is length(u)^2
    initialParams = [];
    if (driftLength > 0)
        initialParams = vcat(initialParams, θ_f)
    end
    if (diffLength > 0)
        initialParams = vcat(initialParams, ones(diffLength)); #currently using diagonal covariance
    end
    initialParams = Array{Float64}(initialParams);
    @assert length(initialParams) == (driftLength + diffLength)
    #--------------------end problem setup--------------------#
    lProb = LearningProblem(initialConditions,
                            trainingData,
                            x,
                            y,
                            validationData,
                            tsteps,
                            miniBatchSize,
                            maxiters,
                            optimizer,
                            trueDrift,
                            trueDiff,
                            encode,
                            decode,
                            modelDrift,
                            modelDiff,
                            fullSystemDrift,
                            nothing,
                            lossAndGrads,
                            initialParams,
                            driftLength,
                            diffLength,
                            parameterizedDriftClosure,
                            parameterizedPrecisionMat);
    println("Setup complete, beginning learning");
    return lProb;
end
#prob = main(nothing);
#Learn(prob);
#myserialize("/groups/chertkov/cmhyett/DNSLearning/MLCoarseGrainedVGT/results/prob.dat",prob);
|
proofpile-julia0005-42862 | {
"provenance": "014.jsonl.gz:242863"
} | module Spatial
# types
export
CartesianFrame3D,
Transform3D,
FreeVector3D,
Point3D,
GeometricJacobian,
PointJacobian,
Twist,
SpatialAcceleration,
MomentumMatrix, # TODO: consider combining with WrenchMatrix
WrenchMatrix, # TODO: consider combining with MomentumMatrix
Momentum,
Wrench,
SpatialInertia
# functions
export
transform,
rotation,
translation,
angular,
linear,
point_velocity,
point_acceleration,
change_base,
log_with_time_derivative,
center_of_mass,
newton_euler,
torque,
torque!,
kinetic_energy,
rotation_vector_rate,
quaternion_derivative,
spquat_derivative,
angular_velocity_in_body,
velocity_jacobian,
linearized_rodrigues_vec
# macros
export
@framecheck
using LinearAlgebra
using Random
using StaticArrays
using Rotations
using DocStringExtensions
using Base: promote_eltype
include("frame.jl")
include("util.jl")
include("transform3d.jl")
include("threevectors.jl")
include("spatialmotion.jl")
include("spatialforce.jl")
include("motion_force_interaction.jl")
include("common.jl")
end # module
|
proofpile-julia0005-42863 | {
"provenance": "014.jsonl.gz:242864"
} | mutable struct CanvasPlot
# command list passed to javascript
jsdict::Dict{String,Any}
# size in canvas coordinates
w::Float64
h::Float64
# world coordinates
xmin::Float64
xmax::Float64
ymin::Float64
ymax::Float64
# transformation data
ax::Float64
bx::Float64
ay::Float64
by::Float64
# unique identifier of html entity
uuid::UUID
CanvasPlot(::Nothing)=new()
end
"""
````
CanvasPlot(;resolution=(300,300),
           xrange::AbstractVector=0:1,
           yrange::AbstractVector=0:1)
````
Create a canvas plot with given resolution in the notebook
and given "world coordinate" range.
"""
function CanvasPlot(;resolution=(300,300),
                    xrange::AbstractVector=0:1,
                    yrange::AbstractVector=0:1)
    p=CanvasPlot(nothing)                        # bare instance via inner constructor
    p.uuid=uuid1()                               # unique id for the html <canvas> element
    p.jsdict=Dict{String,Any}("cmdcount" => 0)   # recorded drawing commands for javascript
    p.w=resolution[1]
    p.h=resolution[2]
    # store world coordinate window and derive the world->canvas transform
    _world!(p,extrema(xrange)...,extrema(yrange)...)
    p
end

# JavaScript drawing routine shipped with the package; embedded verbatim into
# the html emitted by `show`.
const canvasdraw = read(joinpath(@__DIR__, "..", "assets", "canvasdraw.js"), String)

"""
Show plot: emit a <canvas> element plus a script replaying the recorded
drawing commands. NOTE(review): uses `Main.PlutoRunner.publish_to_js`, so
this presumably only renders inside a Pluto notebook — confirm.
"""
function Base.show(io::IO, ::MIME"text/html", p::CanvasPlot)
    result="""
<script>
$(canvasdraw)
const jsdict = $(Main.PlutoRunner.publish_to_js(p.jsdict))
canvasdraw("$(p.uuid)",jsdict)
</script>
<canvas id="$(p.uuid)" width="$(p.w)" height="$(p.h)"></canvas>
"""
    write(io,result)
end
"""
    lines!(p::CanvasPlot,x,y)

Plot lines. Every two consecutive coordinate pairs define one line segment.
"""
lines!(p::CanvasPlot, x, y) = _poly!(p, "lines", x, y)

"""
    polyline!(p::CanvasPlot,x,y)

Plot a connected polyline through the given coordinates.
"""
polyline!(p::CanvasPlot, x, y) = _poly!(p, "polyline", x, y)

"""
    polygon!(p::CanvasPlot,x,y)

Plot a closed polygon through the given coordinates and fill it.
"""
polygon!(p::CanvasPlot, x, y) = _poly!(p, "polygon", x, y)
"""
    linecolor!(p::CanvasPlot,r,g,b)

Set line (stroke) color from rgb components (scaled by 255 before being
recorded, so components are presumably in [0,1]).
"""
function linecolor!(p::CanvasPlot, r, g, b)
    key = command!(p, "linecolor")
    p.jsdict[string(key, "_rgb")] = 255 * Float32[r, g, b]
end

"""
    linewidth!(p::CanvasPlot,w)

Set line width in pixels.
"""
function linewidth!(p::CanvasPlot, w)
    key = command!(p, "linewidth")
    p.jsdict[string(key, "_w")] = w
end

"""
    fillcolor!(p::CanvasPlot,r,g,b)

Set polygon fill color from rgb components.
"""
function fillcolor!(p::CanvasPlot, r, g, b)
    key = command!(p, "fillcolor")
    p.jsdict[string(key, "_rgb")] = 255 * Float32[r, g, b]
end

"""
    textcolor!(p::CanvasPlot,r,g,b)

Set text color from rgb components.
"""
function textcolor!(p::CanvasPlot, r, g, b)
    key = command!(p, "textcolor")
    p.jsdict[string(key, "_rgb")] = 255 * Float32[r, g, b]
end

"""
    textsize!(p::CanvasPlot,px)

Set text size in pixels.
"""
function textsize!(p::CanvasPlot, px)
    key = command!(p, "textsize")
    p.jsdict[string(key, "_pt")] = px
end
# Lookup tables from single-character alignment codes to canvas alignment names.
const halign = Dict("c" => "center", "l" => "left", "r" => "right")
const valign = Dict("b" => "bottom", "c" => "middle", "t" => "top")

"""
    textalign!(p::CanvasPlot,align)

Set text alignment.
`align:` one of `[:lt,:lc,:lb,:ct,:cc,:cb,:rt,:rc,:rb]`
(first character: horizontal, second: vertical).
"""
function textalign!(p::CanvasPlot, align)
    code = String(align)
    hkey = command!(p, "textalign")
    p.jsdict[hkey * "_align"] = halign[code[1:1]]
    vkey = command!(p, "textbaseline")
    p.jsdict[vkey * "_align"] = valign[code[2:2]]
end
"""
    text!(p::CanvasPlot,txt,x,y)

Draw text `txt` at world position (`x`,`y`).
"""
function text!(p::CanvasPlot, txt, x, y)
    key = command!(p, "text")
    cx, cy = _tran2d(p, x, y)   # world -> canvas coordinates
    p.jsdict[key * "_x"] = cx
    p.jsdict[key * "_y"] = cy
    p.jsdict[key * "_txt"] = txt
end
"""
    axis!(p::CanvasPlot;
          xtics=0:1,
          ytics=0:1,
          axislinewidth=2,
          gridlinewidth=1.5,
          ticlength=7,
          ticsize=15,
          xpad=30,
          ypad=30)

Draw an axis with grid and tics, set new
world coordinates according to tics.
"""
function axis!(p::CanvasPlot;
               xtics=0:1,
               ytics=0:1,
               axislinewidth=2,
               gridlinewidth=1.5,
               ticlength=7,
               ticsize=15,
               xpad=30,
               ypad=30)
    linecolor!(p,0,0,0)
    xmin,xmax=extrema(xtics)
    ymin,ymax=extrema(ytics)
    # convert pixel padding into world coordinate units
    world_ypad=ypad*(ymax-ymin)/p.h
    world_xpad=xpad*(xmax-xmin)/p.w
    # enlarge world window so tic labels fit (note asymmetric /2 at upper ends)
    _world!(p,xmin-world_xpad,xmax+world_xpad/2,ymin-world_ypad,ymax+world_ypad/2)
    # grid lines in light gray
    linewidth!(p,gridlinewidth)
    linecolor!(p,0.85,0.85,0.85)
    for y in ytics
        lines!(p,[xmin,xmax],[y,y])
    end
    for x in xtics
        lines!(p,[x,x],[ymin,ymax])
    end
    # axis frame in black
    linewidth!(p,axislinewidth)
    linecolor!(p,0,0,0)
    lines!(p,[xmin,xmax],[ymin,ymin])
    lines!(p,[xmin,xmax],[ymax,ymax])
    lines!(p,[xmin,xmin],[ymin,ymax])
    lines!(p,[xmax,xmax],[ymin,ymax])
    # tic marks + labels; pixel tic lengths are converted to world units via
    # the transform factors ax/ay (ay is negative, hence the sign flip)
    linewidth!(p,gridlinewidth)
    linecolor!(p,0,0,0)
    world_xticlength=-ticlength/p.ay   # world-unit length of tics below the x axis
    world_yticlength=ticlength/p.ax    # world-unit length of tics left of the y axis
    textcolor!(p,0,0,0)
    textsize!(p,ticsize)
    textalign!(p,:rc)
    for y in ytics
        lines!(p,[xmin-world_yticlength,xmin],[y,y])
        text!(p, string(y), xmin-world_yticlength,y)
    end
    textalign!(p,:ct)
    for x in xtics
        lines!(p,[x,x],[ymin-world_xticlength,ymin])
        text!(p, string(x), x,ymin-world_xticlength)
    end
end
|
proofpile-julia0005-42864 | {
"provenance": "014.jsonl.gz:242865"
} | # basic test that parsing works correctly
testdir = dirname(@__FILE__)

# empty input in strict mode must raise
@test_throws Gumbo.InvalidHTMLException parsehtml("", strict=true)

# parse a small example document and check basic structure
# NOTE(review): `readstring` and `is` are pre-0.7 Julia names (today:
# `read(io, String)` / `===`) — this test file targets old Julia.
let
    page = open("$testdir/example.html") do example
        example |> readstring |> parsehtml
    end
    @test page.doctype == "html"
    root = page.root
    @test tag(root[1][1]) == :meta                 # first child of <head>
    @test root[2][1][1].text == "A simple test page."
    @test is(root[2][1][1].parent, root[2][1])     # parent back-pointer is same object
end

# test that nonexistant tags are parsed as their actual name and not "unknown"
let
    page = parsehtml("<weird></weird")
    @test tag(page.root[2][1]) == :weird
end
|
proofpile-julia0005-42865 | {
"provenance": "014.jsonl.gz:242866"
} | # This file is part of Fatou.jl. It is licensed under the MIT license
# Copyright (C) 2017 Michael Reed
import Base: invokelatest
# Strip the surrounding \begin{displaymath}...\end{displaymath} from Reduce's
# LaTeX output, keeping only the formula body.
rdpm(tex) = split(split(tex,"\n\\end{displaymath}")[1],"\\begin{displaymath}\n")[2]

# we can substitute the expression into Newton's method and display it with LaTeX
# (z - m*f(z)/f'(z), factored symbolically via Reduce)
function newton_raphson(F,m)
    f = RExpr(F)
    return Algebra.:-(R"z",Algebra.:*(m,Algebra.:/(f,Algebra.df(f,:z)))) |> factor |> parse
end

# define recursive composition on functions: substitutes the (j-1)-fold
# composition for z (with c=0) inside expression E
recomp(E,x,j::Int) = Algebra.sub((Expr(:(=),:z,j > 1 ? recomp(E,x,j-1) : x),:(c=0)),E)

# we can convert the j-th function composition into a latex expression
nL(E,m,j) = recomp(newton_raphson(E,m),:z,j) |> Algebra.latex |> rdpm
jL(E,j) = recomp(E,:z,j) |> Algebra.latex |> rdpm

# LaTeX fragments for the set of points within an ϵ neighborhood of the roots
# r_i of f (nset*), resp. outside an ϵ ball (jset*)
ds = "\\displaystyle"
set0 = "D_0(\\epsilon) = \\left\\{ z\\in\\mathbb{C}: \\left|\\,z"
setj(j) = "$ds D_$j(\\epsilon) = \\left\\{z\\in\\mathbb{C}:\\left|\\,"
nsetstr = "- r_i\\,\\right|<\\epsilon,\\,\\forall r_i(\\,f(r_i)=0 )\\right\\}"
jsetstr = "\\,\\right|>\\epsilon\\right\\}"
nset0 = latexstring("$set0 $nsetstr")
jset0 = latexstring("$set0 $jsetstr")
nrset(f,m,j) = latexstring(
    j == 0 ? "$set0 $nsetstr" : "$(setj(j))$(nL(f,m,j)) $nsetstr")
jset(f,j) = latexstring(
    j == 0 ? "$set0 $jsetstr" : "$(setj(j))$(jL(f,j)) $jsetstr")
|
proofpile-julia0005-42866 | {
"provenance": "014.jsonl.gz:242867"
} | """
mutable struct DynamicGenerator{
M <: Machine,
S <: Shaft,
A <: AVR,
TG <: TurbineGov,
P <: PSS,
} <: DynamicInjection
name::String
ω_ref::Float64
machine::M
shaft::S
avr::A
prime_mover::TG
pss::P
n_states::Int
states::Vector{Symbol}
ext::Dict{String, Any}
internal::InfrastructureSystemsInternal
end
A dynamic generator is composed by 5 components, namely a Machine, a Shaft, an Automatic Voltage Regulator (AVR),
a Prime Mover (o Turbine Governor) and Power System Stabilizer (PSS). It requires a Static Injection device that is attached to it.
# Arguments
- `name::String`: Name of generator.
- `ω_ref::Float64`: Frequency reference set-point in pu.
- `machine <: Machine`: Machine model for modeling the electro-magnetic phenomena.
- `shaft <: Shaft`: Shaft model for modeling the electro-mechanical phenomena.
- `avr <: AVR`: AVR model of the excitacion system.
- `prime_mover <: TurbineGov`: Prime Mover and Turbine Governor model for mechanical power.
- `pss <: PSS`: Power System Stabilizer model.
- `n_states::Int`: Number of states (will depend on the components).
- `states::Vector{Symbol}`: Vector of states (will depend on the components).
- `ext::Dict{String, Any}`
- `internal::InfrastructureSystemsInternal`: power system internal reference, do not modify
"""
# Concrete container documented by the docstring above: five component type
# parameters so every field is concretely typed for dispatch/performance.
mutable struct DynamicGenerator{
    M <: Machine,
    S <: Shaft,
    A <: AVR,
    TG <: TurbineGov,
    P <: PSS,
} <: DynamicInjection
    name::String
    ω_ref::Float64                 # frequency reference set-point in pu
    machine::M
    shaft::S
    avr::A
    prime_mover::TG
    pss::P
    n_states::Int                  # derived from components (see _calc_n_states)
    states::Vector{Symbol}         # derived from components (see _calc_states)
    ext::Dict{String, Any}
    internal::InfrastructureSystemsInternal
end
# Positional constructor: derives n_states/states from the components and
# creates a fresh internal reference.
function DynamicGenerator(
    name::String,
    ω_ref::Float64,
    machine::M,
    shaft::S,
    avr::A,
    prime_mover::TG,
    pss::P,
    ext::Dict{String, Any} = Dict{String, Any}(),
) where {M <: Machine, S <: Shaft, A <: AVR, TG <: TurbineGov, P <: PSS}
    n_states = _calc_n_states(machine, shaft, avr, prime_mover, pss)
    states = _calc_states(machine, shaft, avr, prime_mover, pss)
    return DynamicGenerator{M, S, A, TG, P}(
        name,
        ω_ref,
        machine,
        shaft,
        avr,
        prime_mover,
        pss,
        n_states,
        states,
        ext,
        InfrastructureSystemsInternal(),
    )
end

# Keyword constructor (used e.g. by deserialization); allows overriding the
# derived fields and the internal reference.
function DynamicGenerator(;
    name::String,
    ω_ref::Float64,
    machine::M,
    shaft::S,
    avr::A,
    prime_mover::TG,
    pss::P,
    n_states = _calc_n_states(machine, shaft, avr, prime_mover, pss),
    states = _calc_states(machine, shaft, avr, prime_mover, pss),
    ext::Dict{String, Any} = Dict{String, Any}(),
    internal = InfrastructureSystemsInternal(),
) where {M <: Machine, S <: Shaft, A <: AVR, TG <: TurbineGov, P <: PSS}
    DynamicGenerator(
        name,
        ω_ref,
        machine,
        shaft,
        avr,
        prime_mover,
        pss,
        n_states,
        states,
        ext,
        internal,
    )
end

# Field accessors; V_ref/P_ref delegate to the AVR resp. prime mover.
IS.get_name(device::DynamicGenerator) = device.name
get_states(device::DynamicGenerator) = device.states
get_n_states(device::DynamicGenerator) = device.n_states
get_ω_ref(device::DynamicGenerator) = device.ω_ref
get_machine(device::DynamicGenerator) = device.machine
get_shaft(device::DynamicGenerator) = device.shaft
get_avr(device::DynamicGenerator) = device.avr
get_prime_mover(device::DynamicGenerator) = device.prime_mover
get_pss(device::DynamicGenerator) = device.pss
get_ext(device::DynamicGenerator) = device.ext
get_internal(device::DynamicGenerator) = device.internal
get_V_ref(value::DynamicGenerator) = get_V_ref(get_avr(value))
get_P_ref(value::DynamicGenerator) = get_P_ref(get_prime_mover(value))
# Total number of dynamic states contributed by the five generator components.
function _calc_n_states(machine, shaft, avr, prime_mover, pss)
    return sum(get_n_states, (machine, shaft, avr, prime_mover, pss))
end
# Concatenated state-name vector of the five generator components, in
# component order (machine, shaft, avr, prime mover, pss).
function _calc_states(machine, shaft, avr, prime_mover, pss)
    components = (machine, shaft, avr, prime_mover, pss)
    return reduce(vcat, map(get_states, components))
end
|
proofpile-julia0005-42867 | {
"provenance": "014.jsonl.gz:242868"
} | module MitosisStochasticDiffEq
using Mitosis
using RecursiveArrayTools
using StochasticDiffEq
using OrdinaryDiffEq
using DiffEqNoiseProcess
import DiffEqNoiseProcess.pCN
using LinearAlgebra
using Random
using UnPack
using Statistics
using StaticArrays
using ForwardDiff
using PaddedViews
import SciMLBase.isinplace
export pCN
include("types.jl")
include("sample.jl")
include("filter.jl")
include("guiding.jl")
include("regression.jl")
include("utils.jl")
include("derivative_utils.jl")
include("solver.jl")
end
|
proofpile-julia0005-42868 | {
"provenance": "014.jsonl.gz:242869"
} | export chebyshev, chebyshev!
# Non-mutating front end: allocates a zero initial guess and delegates.
chebyshev(A, b, λmin::Real, λmax::Real, Pr = 1, n = size(A,2);
          tol::Real = sqrt(eps(typeof(real(b[1])))), maxiter::Int = n^3) =
    chebyshev!(zerox(A, b), A, b, λmin, λmax, Pr, n; tol=tol,maxiter=maxiter)

# Mutating front end: builds the Krylov subspace wrapper around A, then
# delegates to the core iteration below.
function chebyshev!(x, A, b, λmin::Real, λmax::Real, Pr = 1, n = size(A,2);
                    tol::Real = sqrt(eps(typeof(real(b[1])))), maxiter::Int = n^3)
    K = KrylovSubspace(A, n, 1, Adivtype(A, b))
    init!(K, x)
    chebyshev!(x, K, b, λmin, λmax, Pr; tol = tol, maxiter = maxiter)
end

# Core Chebyshev iteration for Ax = b, given bounds [λmin, λmax] on the
# spectrum and an optional (left) preconditioner Pr.
function chebyshev!(x, K::KrylovSubspace, b, λmin::Real, λmax::Real, Pr = 1;
                    tol::Real = sqrt(eps(typeof(real(b[1])))), maxiter::Int = K.n^3)
    local α, p
    K.order = 1
    tol = tol*norm(b)            # convergence is tested on the absolute residual
    r = b - nextvec(K)           # initial residual r = b - A*x
    d::eltype(b) = (λmax + λmin)/2   # center of the eigenvalue interval
    c::eltype(b) = (λmax - λmin)/2   # half-width of the eigenvalue interval
    resnorms = zeros(typeof(real(b[1])), maxiter)
    for iter = 1:maxiter
        z = Pr\r                 # apply preconditioner
        if iter == 1
            p = z
            α = 2/d
        else
            # Chebyshev recurrence for step size α and direction p
            β = (c*α/2)^2
            α = 1/(d - β)
            p = z + β*p
        end
        append!(K, p)
        update!(x, α, p)         # x ← x + α p
        r -= α*nextvec(K)        # residual update with A*p from the subspace
        #Check convergence
        resnorms[iter] = norm(r)
        if resnorms[iter] < tol
            resnorms = resnorms[1:iter]   # truncate history on convergence
            break
        end
    end
    x, ConvergenceHistory(resnorms[end] < tol, tol, K.mvps, resnorms)
end
|
proofpile-julia0005-42869 | {
"provenance": "014.jsonl.gz:242870"
} | using CompScienceMeshes, BEAST
# --- Mesh and discretization spaces -----------------------------------------
fn = joinpath(dirname(pathof(BEAST)),"../examples/assets/torus.msh")
m = CompScienceMeshes.read_gmsh_mesh(fn)
@show numcells(m)
X = raviartthomas(m)          # RWG/Raviart-Thomas space on the torus
@show numfunctions(X)
Y = buffachristiansen(m)      # dual (Buffa-Christiansen) space
@show numfunctions(Y)
# connectivity matrices between mesh skeletons
verts = skeleton(m,0)
edges = skeleton(m,1)
faces = skeleton(m,2)
Λ = connectivity(verts, edges)
Σᵀ = connectivity(edges, faces)
@assert all(sum(Σᵀ,dims=1) .== 0)
# --- Integral operators at wavenumber κ -------------------------------------
κ = 1.0e-5
S = Maxwell3D.singlelayer(wavenumber=κ)
D = Maxwell3D.doublelayer(wavenumber=κ)
C = BEAST.DoubleLayerRotatedMW3D(im*κ)
J = BEAST.Identity()
N = BEAST.NCross()
# Sxx = assemble(S,X,X)
# Myx = assemble(D+0.5N,Y,X)
# Mxx = assemble(C-0.5J,X,X)
# matrices are loaded from disk instead of reassembled (see commented lines)
using JLD2
@load "temp/matrices.jld2"
using LinearAlgebra
norm(Σᵀ*Myx*Λ)
# --- Plane-wave excitation and right-hand sides -----------------------------
ϵ, μ = 1.0, 1.0
ω = κ / √(ϵ*μ)
E = Maxwell3D.planewave(direction=ẑ, polarization=x̂, wavenumber=κ)
H = -1/(im*μ*ω)*curl(E)
e = (n × E) × n
h = (n × H) × n
ex = assemble(e, X)
hy = assemble(h, Y)
hx = assemble(n × H,X)
# solve the three formulations (EFIE / MFIE variants)
u1 = Sxx \ ex
u2 = Myx \ hy
u3 = Mxx \ hx
# far fields on a half great circle
Φ, Θ = [0.0], range(0,stop=π,length=100)
pts = [point(cos(ϕ)*sin(θ), sin(ϕ)*sin(θ), cos(θ)) for ϕ in Φ for θ in Θ]
ff1 = potential(MWFarField3D(wavenumber=κ), pts, u1, X)
ff2 = potential(MWFarField3D(wavenumber=κ), pts, u2, X)
ff3 = potential(MWFarField3D(wavenumber=κ), pts, u3, X)
#Why: projectors splitting solutions into solenoidal/loop (Pl) and star (Ps) parts
Σ = copy(transpose(Σᵀ))
Ps = Σ*pinv(Array(Σᵀ*Σ))*Σᵀ
Pl = I - Ps
u1s = Ps*u1
u2s = Ps*u2
u3s = Ps*u3
u1l = Pl*u1
u2l = Pl*u2
u3l = Pl*u3
# s = svdvals(M)
# h = nullspace(M,s[end]*1.0001)
#
# fcr, geo = facecurrents(h[:,end],X)
#
# V,F = vertexarray(m), cellarray(m)
# B = [real(f[i]) for f in fcr, i in 1:3]
# using MATLAB
# mat"mesh = Mesh($V,$F)"
# mat"[C,N] = faceNormals(mesh)"
# mat"figure; hold on"
# mat"patch(mesh,$(norm.(fcr)))"
# mat"quiver3x(C,$B)"
# mat"quiver3x(C,$B)"
|
proofpile-julia0005-42870 | {
"provenance": "014.jsonl.gz:242871"
} | l=[
"l=[","print(l[1]",
"for i in l print(\" \$(repr(i))\") end",
"print(l[end])",
"for i in l[2:end-1] print(\"\$i\") end",
"]",
]
println(l[1])
for i in l println(" $(repr(i)),") end
println(l[end])
for i in l[2:end-1] println("$i") end |
proofpile-julia0005-42871 | {
"provenance": "014.jsonl.gz:242872"
} | import ProximalBundleMethod
import Printf
import Convex
import Gurobi
import SCS
using LinearAlgebra
using Random
using SparseArrays
include("utils.jl")
include("csv_exporter.jl")
include("plot_utils.jl")
"""
The following functions are used to process LIBSVM files.
"""
mutable struct LearningData
    feature_matrix::SparseMatrixCSC{Float64,Int64}   # rows = samples, cols = features
    labels::Vector{Float64}                          # normalized to ±1
end

# Parse a LIBSVM-format file ("label idx:val idx:val ...") into a sparse
# feature matrix plus a ±1 label vector.
function load_libsvm_file(file_name::String)
    open(file_name, "r") do io
        target = Array{Float64,1}()
        row_indicies = Array{Int64,1}()
        col_indicies = Array{Int64,1}()
        matrix_values = Array{Float64,1}()
        row_index = 0
        for line in eachline(io)
            row_index += 1
            split_line = split(line)
            label = parse(Float64, split_line[1])
            # This ensures that labels are 1 or -1. Different dataset use {-1, 1}, {0, 1}, and {1, 2}.
            if abs(label - 1.0) < 1e-05
                label = 1.0
            else
                label = -1.0
            end
            push!(target, label)
            # remaining tokens are sparse "column:value" entries
            for i = 2:length(split_line)
                push!(row_indicies, row_index)
                matrix_coef = split(split_line[i], ":")
                push!(col_indicies, parse(Int64, matrix_coef[1]))
                push!(matrix_values, parse(Float64, matrix_coef[2]))
            end
        end
        feature_matrix = sparse(row_indicies, col_indicies, matrix_values)
        return LearningData(feature_matrix, target)
    end
end
# Rescale every nonempty column of the feature matrix to unit 2-norm
# (empty columns are left untouched). Returns a new sparse matrix.
function normalize_columns(feature_matrix::SparseMatrixCSC{Float64,Int64})
    ncols = size(feature_matrix, 2)
    scale = [begin
                 vals = feature_matrix[:, j].nzval
                 isempty(vals) ? 1.0 : 1.0 / norm(vals, 2)
             end
             for j in 1:ncols]
    return feature_matrix * sparse(1:ncols, 1:ncols, scale)
end
# Drop columns with no stored entries, returning the submatrix of the
# remaining columns (original column order preserved).
function remove_empty_columns(feature_matrix::SparseMatrixCSC{Float64,Int64})
    occupied = filter(j -> !isempty(feature_matrix[:, j].nzind),
                      1:size(feature_matrix, 2))
    return feature_matrix[:, occupied]
end
# Prepend an all-ones intercept column to the feature matrix.
function add_intercept(feature_matrix::SparseMatrixCSC{Float64,Int64})
    intercept = sparse(ones(size(feature_matrix, 1)))
    return hcat(intercept, feature_matrix)
end
# Normalize a freshly-loaded dataset in place: drop all-zero feature columns,
# prepend an intercept column of ones, then scale every column to unit 2-norm.
function preprocess_learning_data(result::LearningData)
    result.feature_matrix = remove_empty_columns(result.feature_matrix)
    result.feature_matrix = add_intercept(result.feature_matrix)
    result.feature_matrix = normalize_columns(result.feature_matrix)
    return result
end
"""
    svm_objective(w, Xp, y, coeff)

L2-regularized soft-margin SVM objective: mean hinge loss over the rows of
`Xp` plus `coeff/2 * ||w||²`.
"""
function svm_objective(w, Xp, y, coeff)
    n = size(Xp, 1)
    margins = ones(length(y)) - y .* (Xp * w)
    hinge = sum(max.(0, margins))
    return hinge / n + coeff / 2 * LinearAlgebra.norm(w)^2
end
"""
    svm_subgradient(w, Xp, y, coeff)

Subgradient of `svm_objective` at `w`: each margin-violating sample `i`
contributes `-y[i]*Xp[i,:]/n`, plus the regularizer gradient `coeff*w`.
"""
function svm_subgradient(w, Xp, y, coeff)
    n = size(Xp, 1)
    margins = ones(length(y)) - y .* (Xp * w)
    grad = zeros(length(w))
    for (i, margin) in enumerate(margins)
        if margin > 0.0
            grad += -y[i] * Xp[i, :] / n
        end
    end
    return grad + coeff * w
end
# Solve the same regularized SVM problem to high accuracy with Convex.jl +
# Gurobi, to obtain a reference optimum for the error columns in `main`.
function compute_minimum_with_cvx(Xp, y, reg_coeff)
    n, m = size(Xp)
    w = Convex.Variable(m)
    problem = Convex.minimize(
        (reg_coeff / 2) * Convex.sumsquares(w) +
        Convex.sum(Convex.pos((1 - y .* (Xp * w)) / n)),
    )
    # tight barrier tolerances so the reference optimum is reliable
    Convex.solve!(
        problem,
        () -> Gurobi.Optimizer(BarConvTol = 1e-10, BarQCPConvTol = 1e-10),
    ) #() -> SCS.Optimizer(verbose=false), verbose=false)
    return problem
end
# Benchmark driver: for each LIBSVM instance and regularization coefficient,
# compute a reference optimum with Convex.jl, then solve with (a) a polynomial
# step-size subgradient method and (b) the adaptive parallel bundle method,
# recording final objective gaps into results/svm/results_svm.csv.
function main()
    Random.seed!(2625)    # fixed seed for reproducible x_init
    instance_names = ["colon-cancer", "duke", "leu"]
    coefficients = [0.001, 0.01, 0.1, 0.5, 1.0, 1.5, 2.0, 10.0]
    step_sizes = [1e-15 * 100^j for j = 0:10]   # geometric grid for the bundle agents
    instances = Dict()
    for name in instance_names
        instances[name] = preprocess_learning_data(load_libsvm_file("data/" * name))
    end
    errors = DataFrame(
        instance = String[],
        coeff = Float64[],
        error_subg = Float64[],
        error_bundle = Float64[],
    )
    params_subgradient = create_subgradient_method_parameters(
        iteration_limit = 2000,
        verbose = true,
        printing_frequency = 100,
    )
    params_bundle = create_bundle_method_parameters(
        iteration_limit = 2000,
        verbose = true,
        printing_frequency = 100,
        full_memory = false,
    )
    for (name, instance) in instances
        Xp = (instance.feature_matrix)
        y = instance.labels
        _, m = size(Xp)
        x_init = randn(m) / m
        for coeff in coefficients
            print("\n------------------------------------\n")
            Printf.@printf("Instance %s, coeff %12g\n", name, coeff)
            println("Solving with Convex.jl first")
            problem = compute_minimum_with_cvx(Xp, y, coeff)
            Printf.@printf("Obj=%12g\n", problem.optval,)
            # objective is shifted by the reference optimum, so it reports the gap
            objective = (w -> svm_objective(w, Xp, y, coeff) - problem.optval)
            gradient = (w -> svm_subgradient(w, Xp, y, coeff))
            poly_step_size = ((_, _, t) -> 1 / (coeff * t))
            println("\nAbout to solve a random SVM problem using subgradient method.")
            sol_subgradient = ProximalBundleMethod.solve(
                objective,
                gradient,
                params_subgradient,
                poly_step_size,
                x_init,
            )
            println("\nAbout to solve a random SVM problem using adaptive parallel method.")
            sol_bundle, iter_info_agents = ProximalBundleMethod.solve_adaptive(
                objective,
                gradient,
                params_bundle,
                step_sizes,
                x_init,
            )
            push!(
                errors,
                [
                    name,
                    coeff,
                    objective(sol_subgradient.solution),
                    objective(sol_bundle.solution),
                ],
            )
            # (Removed: large commented-out block that exported per-run CSV
            # statistics and step-size/objective/agent plots for both methods.)
        end
    end
    print(errors)
    CSV.write("results/svm/results_svm.csv", errors)
end
main()
|
proofpile-julia0005-42872 | {
"provenance": "014.jsonl.gz:242873"
} | type Trie{T}
value::T
children::Dict{Char,Trie{T}}
is_key::Bool
function Trie()
self = new()
self.children = (Char=>Trie{T})[]
self.is_key = false
self
end
end
# NOTE(review): this file uses pre-1.0 Julia syntax (`type`, `setindex!{T}`)
# and only runs on old Julia versions.

# Default to an untyped trie.
Trie() = Trie{Any}()

# Insert `val` under `key`, creating intermediate nodes as needed.
function setindex!{T}(t::Trie{T}, val::T, key::String)
    node = t
    for char in key
        if !haskey(node.children, char)
            node.children[char] = Trie{T}()
        end
        node = node.children[char]
    end
    node.is_key = true
    node.value = val
end

# Walk down the trie along `prefix`; return the subtrie rooted there,
# or `nothing` if the prefix is absent.
function subtrie(t::Trie, prefix::String)
    node = t
    for char in prefix
        if !haskey(node.children, char)
            return nothing
        else
            node = node.children[char]
        end
    end
    node
end

# True only if `key` was explicitly inserted (not merely a prefix of a key).
function haskey(t::Trie, key::String)
    node = subtrie(t, key)
    node != nothing && node.is_key
end

get(t::Trie, key::String) = get(t, key, nothing)

# Lookup with default: stored value, or `notfound` when absent.
function get(t::Trie, key::String, notfound)
    node = subtrie(t, key)
    if node != nothing && node.is_key
        return node.value
    end
    notfound
end
# Collect every stored key below `t` into `found`, each prefixed by `prefix`.
# Fixed: the original used `push` and `strcat`, pre-0.2 names that were removed
# from Julia; the rest of this file already uses the surviving API (`push!`,
# `haskey`), so use `push!` and `string` here as well.
function keys(t::Trie, prefix::String, found)
    if t.is_key
        push!(found, prefix)
    end
    for (char,child) in t.children
        keys(child, string(prefix,char), found)
    end
end
# Convenience overloads: collect into a fresh String vector.
keys(t::Trie, prefix::String) = (found=String[]; keys(t, prefix, found); found)
keys(t::Trie) = keys(t, "")
# All stored keys beginning with `prefix`; empty list if no such subtrie.
function keys_with_prefix(t::Trie, prefix::String)
    st = subtrie(t, prefix)
    st != nothing ? keys(st,prefix) : []
end
|
proofpile-julia0005-42873 | {
"provenance": "014.jsonl.gz:242874"
} | using CImGui
using CImGui.CSyntax
using CImGui.CSyntax.CSwitch
using CImGui.CSyntax.CStatic
using Printf
"""
    ShowExampleAppLongText(p_open::Ref{Bool})

Test rendering huge amount of text, and the incidence of clipping.
Demonstrates three strategies: one big `TextUnformatted` call, manual
clipping via `ImGuiListClipper`, and naive unclipped `Text` calls.
"""
function ShowExampleAppLongText(p_open::Ref{Bool})
    CImGui.SetNextWindowSize((520,600), CImGui.ImGuiCond_FirstUseEver)
    CImGui.Begin("Example: Long text display", p_open) || (CImGui.End(); return)

    # @cstatic keeps these values across frames (C-style function statics)
    @cstatic test_type=Cint(0) lines=0 log=CImGui.TextBuffer() begin
        CImGui.Text("Printing unusually long amount of text.")
        @c CImGui.Combo("Test type", &test_type, "Single call to TextUnformatted()\0Multiple calls to Text(), clipped manually\0Multiple calls to Text(), not clipped (slow)\0")
        CImGui.Text("Buffer contents: $lines lines, $(CImGui.Size(log)) bytes")
        if CImGui.Button("Clear")
            @info "Trigger Clear | find me here: $(@__FILE__) at line $(@__LINE__)"
            CImGui.Clear(log)
            lines = 0
        end
        CImGui.SameLine()
        if CImGui.Button("Add 1000 lines")
            @info "Trigger Add 1000 lines | find me here: $(@__FILE__) at line $(@__LINE__)"
            foreach(i->CImGui.Append(log, "$(lines+i) The quick brown fox jumps over the lazy dog\n"), 1:1000)
            lines += 1000
        end
        CImGui.BeginChild("Log")
        @cswitch test_type begin
            @case 0
                # single call to TextUnformatted() with a big buffer
                CImGui.TextUnformatted(CImGui.Begin(log), CImGui.End(log))
                break
            @case 1
                # multiple calls to Text(), manually coarsely clipped - demonstrate how to use the ImGuiListClipper helper.
                CImGui.PushStyleVar(CImGui.ImGuiStyleVar_ItemSpacing, (0,0))
                clipper = CImGui.Clipper()
                CImGui.Begin(clipper, lines)
                while CImGui.Step(clipper)
                    # only the rows the clipper reports as visible are drawn
                    s = CImGui.Get(clipper, :DisplayStart)
                    e = CImGui.Get(clipper, :DisplayEnd)-1
                    foreach(i->CImGui.Text("$i The quick brown fox jumps over the lazy dog"), s:e)
                end
                CImGui.PopStyleVar()
                CImGui.Destroy(clipper)
                break
            @case 2
                # multiple calls to Text(), not clipped (slow)
                CImGui.PushStyleVar(CImGui.ImGuiStyleVar_ItemSpacing, (0,0))
                foreach(i->CImGui.Text("$i The quick brown fox jumps over the lazy dog"), 1:lines)
                CImGui.PopStyleVar()
                break
        end
        CImGui.EndChild()
        CImGui.End()
    end # @cstatic
end
|
proofpile-julia0005-42874 | {
"provenance": "014.jsonl.gz:242875"
} | using HypothesisTests, Test
using HypothesisTests: default_tail
using StableRNGs
@testset "F-tests" begin
    @testset "Basic variance F-test" begin
        # StableRNG keeps the samples (and hence the expected statistics)
        # identical across Julia versions.
        rng = StableRNG(12)
        # equal-variance samples: test should NOT reject
        y1_h0 = 4 .+ randn(rng, 500)
        y2_h0 = 4 .+ randn(rng, 400)
        t = VarianceFTest(y1_h0, y2_h0)
        @test t.n_x == 500
        @test t.n_y == 400
        @test t.df_x == 499
        @test t.df_y == 399
        @test t.F ≈ 0.859582 rtol=1e-5
        @test pvalue(t) ≈ 0.109714 rtol=1e-5
        @test pvalue(t, tail=:left) ≈ 0.0548572 rtol=1e-5
        @test pvalue(t, tail=:right) ≈ 0.945143 rtol=1e-5
        @test default_tail(t) == :both
        # swapping the samples inverts F and mirrors the one-sided p-values
        t = VarianceFTest(y2_h0, y1_h0)
        @test t.n_x == 400
        @test t.n_y == 500
        @test t.df_x == 399
        @test t.df_y == 499
        @test t.F ≈ 1.163355 rtol=1e-5
        @test pvalue(t) ≈ 0.109714 rtol=1e-5
        @test pvalue(t, tail=:right) ≈ 0.0548572 rtol=1e-5
        @test pvalue(t, tail=:left) ≈ 0.945143 rtol=1e-5
        @test default_tail(t) == :both
        # clearly unequal variances: test should reject decisively
        y1_h1 = 0.7*randn(rng, 200)
        y2_h1 = 1.3*randn(rng, 120)
        t = VarianceFTest(y1_h1, y2_h1)
        @test t.n_x == 200
        @test t.n_y == 120
        @test t.df_x == 199
        @test t.df_y == 119
        @test t.F ≈ 0.264161 rtol=1e-5
        @test pvalue(t) < 1e-8
        @test default_tail(t) == :both
        @test pvalue(t, tail=:left) < 1e-8
        @test pvalue(t, tail=:right) > 1.0 - 1e-8
    end
end
|
proofpile-julia0005-42875 | {
"provenance": "014.jsonl.gz:242876"
} | using Random: randperm!
using HDF5
"""
MODIFIED DataLoader Class
Modifications:
- Altered "Base.iterate" implementation to handle hdf5 (.h5) files
- Files are only read from disk when they're needed
Original version: https://github.com/boathit/Benchmark-Flux-PyTorch/blob/master/dataloader.jl
Original documentation:
    DataLoader(dataset::AbstractArray...; batchsize::Int, shuffle::Bool)
DataLoader provides iterators over the dataset.
```julia
X = rand(10, 1000)
Y = rand(1, 1000)
m = Dense(10, 1)
loss(x, y) = Flux.mse(m(x), y)
opt = ADAM(params(m))
trainloader = DataLoader(X, Y, batchsize=256, shuffle=true)
Flux.train!(loss, trainloader, opt)
```
"""
struct DataLoader
    dataset::Tuple            # tuple of arrays; last dimension indexes samples
    batchsize::Int
    shuffle::Bool             # reshuffle indices after each full pass
    indices::Vector{Int}      # current sample visitation order
    n::Int                    # total number of samples
end

# Validating constructor: all dataset arrays must agree on sample count.
function DataLoader(
    dataset::Tuple{AbstractArray,Vararg{AbstractArray}};
    batchsize::Int,
    shuffle::Bool,
)
    l = last.(size.(dataset))
    n = first(l)
    all(n .== l) || throw(DimensionMismatch("All data should have the same length."))
    indices = collect(1:n)
    shuffle && randperm!(indices)
    DataLoader(dataset, batchsize, shuffle, indices, n)
end

# Varargs convenience form: DataLoader(X, Y; batchsize=..., shuffle=...).
DataLoader(dataset::AbstractArray...; batchsize::Int, shuffle::Bool) =
    DataLoader(dataset, batchsize = batchsize, shuffle = shuffle)
# Iterate over mini-batches. The iterator state `start` is the index (into
# `it.indices`) of the first sample of the next batch. Each batch reads its
# HDF5 files from disk on demand and returns `gpu((X_batch, Y_batch))`.
#
# Fix: the batch buffers were previously always sized `it.batchsize`, although
# the last batch of an epoch can be smaller (the original comment acknowledged
# this) — the unfilled trailing slots then contained uninitialized garbage.
# Buffers are now sized by the actual number of samples in the batch.
function Base.iterate(it::DataLoader, start = 1)
    if start > it.n
        # epoch finished: reshuffle (if enabled) for the next pass and stop
        it.shuffle && randperm!(it.indices)
        return nothing
    end
    nextstart = min(start + it.batchsize, it.n + 1)
    i = it.indices[start:nextstart-1]
    # Select batch data
    raw_batch = Tuple(copy(selectdim(x, ndims(x), i)) for x in it.dataset)
    # Prepare batch arrays sized to the ACTUAL batch length; the trailing
    # dimension indexes the sample within the batch.
    nbatch = length(i)
    X_batch = Array{Float32}(undef, 40, 40, 4, nbatch)
    Y_batch = Array{Float32}(undef, 1, nbatch)
    for k in 1:nbatch
        # raw_batch[1][k] holds the path of the h5 file for this sample
        f = cpu(h5open(raw_batch[1][k], "r"))
        img = permutedims(f["data"][1:4, :, 1:40], [2, 3, 1])
        close(f)
        X_batch[:, :, :, k] = img
        depth = raw_batch[2][k]
        Y_batch[1, k] = depth
    end
    new_element = (X_batch, Y_batch)
    return gpu(new_element), nextstart
end
# NOTE(review): `length` reports the number of SAMPLES, not the number of
# batches produced by iteration — confirm callers expect this.
Base.length(it::DataLoader) = it.n
Base.eltype(it::DataLoader) = typeof(it.dataset)

# Compact human-readable summary.
function Base.show(io::IO, it::DataLoader)
    print(io, "DataLoader(dataset size = $(it.n)")
    print(io, ", batchsize = $(it.batchsize), shuffle = $(it.shuffle)")
    print(io, ")")
end
|
proofpile-julia0005-42876 | {
"provenance": "014.jsonl.gz:242877"
} | # ---------------------------------------------------------------------------- #
#
# master.jl
#
# Abstract master element type
# This allows for writing an n-dimensional version of solver methods
#
# λυτέος
# Fall 2017
#
# Max Opgenoord
#
# ---------------------------------------------------------------------------- #
"""
    Master

Master abstract type:
Overarching abstract type for master element types. This allows writing
n-dimensional solver methods generically. Currently triangle and
tetrahedron implemented (see master2D.jl / master3D.jl).
"""
abstract type Master
end
include("../integration/quadratureLine.jl")
include("../integration/quadratureTriangle.jl")
include("../integration/quadratureTet.jl")
include("../integration/basisFuncLineLag.jl")
include("../integration/basisFuncTriangleLag.jl")
include("../integration/basisFuncTetLag.jl")
include("master2D.jl")
include("master3D.jl")
|
proofpile-julia0005-42877 | {
"provenance": "014.jsonl.gz:242878"
} |
"""
    Temperature_dist(T, theta_l, theta_v, q_l, q_v, rho_vs, soil_parameters,
                     Ta, Hr_atm, St, u, psi_H, psi_m, soil_numerical_parameters,
                     constants, atm_parameters, dt)

Advance the 1-D soil temperature profile `T` over one (adaptive) time step.
Heat conduction plus convective heat carried by the liquid (`q_l`) and vapor
(`q_v`) fluxes is integrated explicitly on the interior nodes; the bottom node
is held fixed and the top node is set from a surface energy balance
(net radiation − sensible heat − latent heat). If a sweep does not reduce the
mean relative change of the profile, `dt` is halved and the profile restored.

Returns `(T, E, dt)`: the updated profile (also mutated in place), the surface
evaporation rate `E`, and the possibly reduced time step `dt`.

NOTE(review): `albedo` (used in the net-radiation term below) is neither an
argument nor defined locally — presumably a global or a field of
`atm_parameters`/`soil_parameters`; confirm before use.
"""
function Temperature_dist(T,theta_l,theta_v,q_l,q_v,rho_vs,soil_parameters,
    Ta, Hr_atm, St,u, psi_H, psi_m, soil_numerical_parameters,constants,atm_parameters,dt)
    theta_top = theta_l[end]; # top layer water content
    rs = 10*exp(35.63*(0.15-theta_top)); # soil surface resistance to vapor flow
    # aerodynamic resistance (log-profile with stability corrections psi_H/psi_m)
    rv = 1/(u*constants.k^2)*(log((atm_parameters.z_ref-atm_parameters.d+atm_parameters.z_H)/atm_parameters.z_oH)+psi_H)*
        (log((atm_parameters.z_ref-atm_parameters.d+atm_parameters.z_m)/atm_parameters.z_om)+psi_m);
    rH = rv; # resistance to heat transport taken equal to vapor resistance
    e_a = 0.611*exp(17.27*(Ta-273.15)/(Ta-35.85))*Hr_atm; # atm. vapor pressure
    rho_sa = 1e-3 .* exp.(19.84 .- 4975.9 ./ Ta); # saturated vapor density at Ta
    rho_va = rho_sa*Hr_atm; # atmospheric vapor density
    eps_a = 0.7+5.95e-5*e_a*exp(1500/Ta); # atm. emissivity
    eps_s = min(0.9+0.18*theta_top,1); # soil emissivity
    # volumetric heat capacity and thermal conductivity of each soil layer
    Cp = constants.Cs.*(soil_parameters.theta_s.-theta_l)+constants.Cw.*theta_l.+constants.Cv.*theta_v;
    lambda = soil_parameters.b1.+soil_parameters.b2.*theta_l+soil_parameters.b3.*sqrt.(theta_l);
    rho_w = 1000 .- 7.3e-3 .* (T.-(273+4)).^2 .+ 3.79e-5 .* (T.-(273+4)).^3; # water density [kg m^-3]
    Lw = 2.501e6 .- 2369.2 .* (T .- 273); # latent heat [J kg^-1]
    Lo = Lw.*rho_w; # vol. latent heat [J m^-3]
    L = 2.260e6; # latent heat for vaporization [J kg^-1]
    # BUG FIX: `T_old = T` only aliased the array, so the restore-on-failure
    # branch below was a no-op and the convergence measure compared an array
    # with itself (always zero). Take a real snapshot instead.
    T_old = copy(T);
    O = zeros(2) # O[1]: best relative change so far; O[2]: current sweep's change
    O[1] = 1;
    LE = 0
    while O[1] > soil_numerical_parameters.eps_T
        temp_T_2 = copy(T); # BUG FIX: snapshot of the previous iterate (was an alias)
        temp_T = (T+T_old)./2; # time-averaged profile used in the spatial operator
        # explicit update of interior nodes: conduction + conductivity gradient
        # + convective transport by the liquid and vapor fluxes
        for i = 2:soil_numerical_parameters.Ns-1
            T[i] = T_old[i]+dt/(soil_numerical_parameters.dz_s^2*Cp[i])*(lambda[i]*(temp_T[i+1]-2*temp_T[i]+temp_T[i-1])+
                (lambda[i+1]-lambda[i])*(temp_T[i+1]-temp_T[i])-(constants.Cw*q_l[i]+constants.Cv*q_v[i])*soil_numerical_parameters.dz_s*(temp_T[i+1]-temp_T[i]));
        end
        # bottom boundary: fixed temperature
        T[1] = T_old[1];
        # top boundary: surface energy balance.
        # NOTE(review): `albedo` is undefined in this scope — see docstring.
        Rn = (1-albedo)*St+eps_s*eps_a*constants.sigma*Ta^4-
            eps_s*constants.sigma*temp_T[end]^4; # net radiation
        H = constants.Ca*(temp_T[end]-Ta)/rH; # sensible heat flux
        LE = L.*(rho_vs[end].-rho_va)./(rv.+rs); # latent heat flux
        G = Rn.-H.-LE; # ground heat flux
        T[end] = (G+T[end-1]*lambda[end]/soil_numerical_parameters.dz_s+Lo[end]*q_v[end])/(lambda[end]/soil_numerical_parameters.dz_s-constants.Cv*q_v[end]-constants.Cw*q_l[end]);
        # refresh temperature-dependent water properties
        rho_w = 1000 .- 7.3e-3 .* (T.-(273+4)).^2 .+ 3.79e-5 .* (T.-(273+4)).^3;
        Lw = 2.501e6 .- 2369.2 .* (T .- 273);
        Lo = Lw.*rho_w;
        O[2] = mean(abs.(temp_T_2.-T)./temp_T); # mean relative change of this sweep
        if O[2] < O[1]
            O[1] = O[2]; # converging: accept the sweep
        else
            dt = dt/2; # diverging: halve the step ...
            T .= T_old; # BUG FIX: ... and actually restore the snapshot
                        # (was `T = T_old`, a no-op rebind of the aliased array)
            O[1] = 1;
        end
        if dt == 0
            dt = 1;
        end
    end
    E = LE/(L*rho_w[end]); # surface evaporation rate
    return T,E,dt
end
|
proofpile-julia0005-42878 | {
"provenance": "014.jsonl.gz:242879"
} | "`base(::Type{T}, i)` — construct the singleton collection (`i`,) of collection type `T`"
function base end
"`randvarid(x)` — id of the random variable in a projection"
function randvarid end
"`combine(a, b)` — combine (e.g. concatenate) `a` and `b`"
function combine end
"`append(a, b)` — append `b` to the end of `a`, like `push!` but functional (non-mutating)"
function append end
"`increment(id::ID)` — increment the id"
function increment end
proofpile-julia0005-42879 | {
"provenance": "014.jsonl.gz:242880"
} | import Base: isidentifier, is_id_start_char, is_id_char
# Julia keywords that cannot be used as bare identifiers; `identifier` below
# prefixes any match with '_'. The list targets the Julia 0.5/0.6 era
# (`typealias`, `bitstype`, `immutable`, `importall` were keywords then).
const RESERVED_WORDS = Set(["begin", "while", "if", "for", "try",
"return", "break", "continue", "function", "macro", "quote", "let",
"local", "global", "const", "abstract", "typealias", "type", "bitstype",
"immutable", "do", "module", "baremodule", "using", "import",
"export", "importall", "end", "else", "elseif", "catch", "finally"])
# Version-dependent keywords: `ccall` is reserved only before 0.6-dev.2194,
# and `struct` became a keyword at 0.6-dev.2698.
VERSION < v"0.6.0-dev.2194" && push!(RESERVED_WORDS, "ccall")
VERSION >= v"0.6.0-dev.2698" && push!(RESERVED_WORDS, "struct")
# Convert an arbitrary string into a Symbol that is a legal Julia identifier.
# Reserved words are prefixed with '_' so the result is always usable in
# generated code. (Pre-1.0 API: `normalize_string`, `@compat`.)
function identifier(s::AbstractString)
# Unicode-normalize first so equivalent spellings map to the same symbol.
s = normalize_string(s)
if !isidentifier(s)
# not already a valid identifier: sanitize character by character
s = makeidentifier(s)
end
@compat(Symbol(in(s, RESERVED_WORDS) ? "_"*s : s))
end
# Build a valid identifier from `s` by keeping identifier characters and
# collapsing every run of invalid characters into a single '_'.
# Uses the pre-0.7 iteration protocol (`start`/`next`/`done`).
function makeidentifier(s::AbstractString)
i = start(s)
# empty input: fall back to a bare "x"
done(s, i) && return "x"
res = IOBuffer(sizeof(s) + 1)
(c, i) = next(s, i)
# First character: keep a valid start char as-is; a char that may only appear
# in the interior (e.g. a digit) gets an 'x' prefix; anything else becomes '_'.
# `under` records whether the last written char was '_', so later invalid
# characters do not produce consecutive underscores.
under = if is_id_start_char(c)
write(res, c)
c == '_'
elseif is_id_char(c)
write(res, 'x', c)
false
else
write(res, '_')
true
end
while !done(s, i)
(c, i) = next(s, i)
if c != '_' && is_id_char(c)
write(res, c)
under = false
elseif !under
# first invalid char of a run: emit one '_' and swallow the rest
write(res, '_')
under = true
end
end
return String(take!(res))
end
"""
    make_unique(names::Vector{Symbol}; allow_duplicates=true)

Return a copy of `names` in which repeated symbols are disambiguated by
appending `_1`, `_2`, … to each later occurrence. With
`allow_duplicates=false`, throw an `ArgumentError` instead of renaming.
"""
function make_unique(names::Vector{Symbol}; allow_duplicates=true)
    result = copy(names)
    seen = Set{Symbol}()
    dup_positions = Int[]
    # First pass: record the position of every repeated name.
    for (idx, nm) in enumerate(result)
        if nm in seen
            push!(dup_positions, idx)
        else
            push!(seen, nm)
        end
    end
    if !allow_duplicates && length(dup_positions) > 0
        d = unique(result[dup_positions])
        msg = """Duplicate variable names: $d.
        Pass allow_duplicates=true to make them unique using a suffix automatically."""
        throw(ArgumentError(msg))
    end
    # Second pass: rename each duplicate with the first free numeric suffix.
    for idx in dup_positions
        nm = result[idx]
        k = 1
        candidate = Symbol("$(nm)_$k")
        while candidate in seen
            k += 1
            candidate = Symbol("$(nm)_$k")
        end
        result[idx] = candidate
        push!(seen, candidate)
    end
    return result
end
#' @description
#'
#' Generate standardized names for columns of a DataTable. The
#' first name will be :x1, the second :x2, etc.
#'
#' @field n::Integer The number of names to generate.
#'
#' @returns names::Vector{Symbol} A vector of standardized column names.
#'
#' @examples
#'
#' DataTables.gennames(10)
"""
    gennames(n::Integer)

Generate `n` standardized column names `:x1, :x2, …, :xn`.
"""
function gennames(n::Integer)
    # A typed comprehension replaces the removed-in-1.0 `Array{Symbol}(n)`
    # undef constructor and drops the `@sprintf` dependency; `Symbol("x", i)`
    # produces the same name as `@sprintf "x%d" i` for integer `i`, and the
    # comprehension works on both the 0.5/0.6-era and modern Julia.
    return [Symbol("x", i) for i in 1:n]
end
#' @description
#'
#' Count the number of null values in an array.
#'
#' @field a::AbstractArray The array whose missing values are to be counted.
#'
#' @returns count::Int The number of null values in `a`.
#'
#' @examples
#'
#' DataTables.countnull([1, 2, 3])
function countnull(a::AbstractArray)
    # Tally the entries `_isnull` reports as null; `count` returns an Int,
    # matching the original Bool-accumulating loop.
    return count(_isnull, a)
end
#' @description
#'
#' Count the number of missing values in a NullableArray.
#'
#' @field a::NullableArray The NullableArray whose missing values are to be counted.
#'
#' @returns count::Int The number of null values in `a`.
#'
#' @examples
#'
#' DataTables.countnull(NullableArray([1, 2, 3]))
# `a.isnull` is the Bool mask of missing entries; summing Bools gives the count.
countnull(a::NullableArray) = sum(a.isnull)
#' @description
#'
#' Count the number of missing values in a NullableCategoricalArray.
#'
#' @field na::CategoricalArray The CategoricalArray whose missing values
#' are to be counted.
#'
#' @returns count::Int The number of null values in `a`.
#'
#' @examples
#'
#' DataTables.countnull(CategoricalArray([1, 2, 3]))
function countnull(a::CategoricalArray)
    # A reference value of 0 marks a missing entry; count them directly.
    return count(x -> x == 0, a.refs)
end
# Gets the name of a function. Used in groupedatatable/grouping.jl
# Map each function to a display name; anonymous functions (whose `string` is
# "(anonymous function)" on Julia < 0.5) are numbered λ1, λ2, … in order of
# appearance. NOTE: uses the pre-1.0 `f{T}(...)` parametric method syntax.
function _fnames{T<:Function}(fs::Vector{T})
λcounter = 0
names = map(fs) do f
name = string(f)
if name == "(anonymous function)" # Anonymous functions with Julia < 0.5
λcounter += 1
name = "λ$(λcounter)"
end
name
end
names
end
# Fallback: ordinary values are never null.
_isnull(x::Any) = false
# Nullable wrappers delegate to `isnull` (pre-1.0 Nullable API).
_isnull(x::Nullable) = isnull(x)
|
proofpile-julia0005-42880 | {
"provenance": "014.jsonl.gz:242881"
} | using DataDeps
# Register the UCI "Breast Cancer" dataset with DataDeps: dataset page,
# direct download URL, and the SHA-256 checksum of the raw data file.
register(DataDep(
"breast-cancer",
"http://archive.ics.uci.edu/ml/datasets/Breast+Cancer",
"http://archive.ics.uci.edu/ml/machine-learning-databases/breast-cancer/breast-cancer.data",
"ca7d3fa97b62ff967b6894ffbb3acaaf4a51062e0149e5950b3ad6f685970b65",
# After download, normalize the raw file: column 10 is the target,
# columns 1-9 are features, with the listed columns treated as categorical.
post_fetch_method=(path -> begin
UCIData.process_dataset(path,
target_index=10,
feature_indices=1:9,
categoric_indices=[1:6; 8:9],
)
end),
))
|
proofpile-julia0005-42881 | {
"provenance": "014.jsonl.gz:242882"
} |
# NOTE(review): stub with a declared `Int64` return type but an empty body;
# a call would return `nothing` and fail the conversion implied by `::Int64`.
# Left byte-identical — confirm whether this fragment is intentional.
function f(x,y) ::
Int64 end
|
proofpile-julia0005-42882 | {
"provenance": "014.jsonl.gz:242883"
} | function compute_edge_faces_ring(faces)
"""
compute_edge_face_ring - compute faces adjacent to each edge
e2f = compute_edge_face_ring(faces);
e2f(i,j) and e2f(j,i) are the number of the two faces adjacent to
edge (i,j).
Copyright (c) 2007 Gabriel Peyre
"""
# `faces` holds one triangle per column (3 vertex indices each).
n = maximum(faces)
m = size(faces,2)
# The three directed edges of each triangle: (i[k],j[k]) belonging to face s[k].
i = [faces[1,:] faces[2,:] faces[3,:]]
j = [faces[2,:] faces[3,:] faces[1,:]]
s = [1:m 1:m 1:m];
# first without duplicate
# Encode each directed edge (i,j) as the scalar key i + (max+1)*j so that
# `unique`/`indexin` can deduplicate pairs.
# NOTE(review): relies on `indexin` returning positions of matches — its
# not-found sentinel changed across Julia versions (0 vs nothing); confirm
# against the targeted Julia version.
tmp = unique(i + (maximum(i) + 1)*j) # unique
I = indexin(tmp, i + (maximum(i) + 1)*j)
# remaining items
J = setdiff(1:length(s), I);
# flip the duplicates
# A second occurrence of edge (i,j) is stored flipped as (j,i), so each
# direction of a shared edge keeps one adjacent face.
i1 = [i[I]; j[J]]
j1 = [j[I]; i[J]]
s = [s[I]; s[J]]
# remove doublons
tmp = unique(i1 + (maximum(i1) + 1)*j1)
I = indexin(tmp, i1 + (maximum(i1) + 1)*j1)
i1 = i1[I]
j1 = j1[I]
s = s[I]
# A[i,j] = index of the face adjacent to directed edge (i,j).
A = sparse(i1,j1,s,n,n)
# add missing points
# Boundary edges: positions where the transpose has a face but A does not
# get marked with -1 (findall on A' yields transposed coordinates of A).
I = findall(A'.!=0)
I = I[A[I].==0]
A[I]=-1
return A
end;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.