diff --git a/.travis.yml b/.travis.yml
index dddc34a..ec0782e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,7 +4,7 @@ os:
   - linux
   - osx
 julia:
-  - 0.6
+  - 1.0
   - nightly
 notifications:
   email: false
diff --git a/README.md b/README.md
index 3e3cc3a..8f37541 100644
--- a/README.md
+++ b/README.md
@@ -10,8 +10,6 @@ Add FileIO.jl integration for FCS files
 ```julia
 julia> using FileIO
 
-julia> using FCSFiles
-
 julia> flowrun = load("example.fcs")
 FCS.FlowSample{Float32}
     Machine: LSRFortessa
diff --git a/REQUIRE b/REQUIRE
index 3e44cf6..ce5b51f 100644
--- a/REQUIRE
+++ b/REQUIRE
@@ -1,2 +1,2 @@
-julia 0.6
+julia 0.7
 FileIO
diff --git a/src/FCSFiles.jl b/src/FCSFiles.jl
index d646fa5..0735370 100644
--- a/src/FCSFiles.jl
+++ b/src/FCSFiles.jl
@@ -8,11 +8,7 @@ include("parse.jl")
 
 export FlowSample
 
-try
-    add_format(format"FCS", "FCS", [".fcs"])
-end
-
-function FileIO.load(f::File{format"FCS"})
+function load(f::File{format"FCS"})
     open(f) do io
         offsets = parse_header(io)
diff --git a/src/parse.jl b/src/parse.jl
index 75f77b3..a3ba80d 100644
--- a/src/parse.jl
+++ b/src/parse.jl
@@ -1,16 +1,20 @@
 function parse_header(io)
     seekstart(io)
-    version = String(read(io, UInt8, 6))
+    rawversion = Array{UInt8}(undef, 6)
+    read!(io, rawversion)
+    version = String(rawversion)
     if "$version" != "FCS3.0" && version != "FCS3.1"
         warn("$version files are not guaranteed to work")
     end
     seek(io, 10)
 
     # start, end positions of TEXT, DATA, and ANALYSIS sections
-    offsets = Array{Int64}(6)
+    offsets = Array{Int64}(undef, 6)
     for i in 1:6
         # offsets are encoded as ASCII strings
-        raw_str = String(read(io, UInt8, 8))
-        offsets[i] = parse(Int, strip(join(raw_str)))
+        raw_str = Array{UInt8}(undef, 8)
+        read!(io, raw_str)
+        offsets_str = String(raw_str)
+        offsets[i] = parse(Int, strip(join(offsets_str)))
     end
 
     # DATA offsets are larger than 99,999,999bytes
@@ -24,14 +28,16 @@ end
 function parse_text(io, start_text::Int, end_text::Int)
     seek(io, start_text)
     # TODO: Check for supplemental TEXT file
-    raw_text = String(read(io, UInt8, end_text - start_text + 1))
+    raw_btext = Array{UInt8}(undef, end_text - start_text + 1)
+    read!(io, raw_btext)
+    raw_text = String(raw_btext)
     delimiter = raw_text[1]
 
     text_mappings = Dict{String, String}()
 
     # initialize iterator
-    prev, state = next(raw_text, start(raw_text))
-    while !done(raw_text, state)
-        i, state = next(raw_text, state)
+    iter_result = iterate(raw_text)
+    while iter_result !== nothing
+        i, state = iter_result
 
         # found a new key, value pair
         if i == '$'
@@ -42,7 +48,7 @@ function parse_text(io, start_text::Int, end_text::Int)
             # FCS keywords are case insensitive so force them uppercase
             text_mappings["\$"*uppercase(key)] = value
         end
-        prev = i
+        iter_result = iterate(raw_text, state)
     end
     text_mappings
 end
@@ -56,7 +62,8 @@ function parse_data(io,
     # Add support for data types other than float
     (text_mappings["\$DATATYPE"] != "F") && error("Non float32 support not implemented yet. Please see github issues for this project.")
 
-    flat_data = read(io, Float32, (end_data - start_data + 1) ÷ 4)
+    flat_data = Array{Float32}(undef, (end_data - start_data + 1) ÷ 4)
+    read!(io, flat_data)
     endian_func = get_endian_func(text_mappings)
     map!(endian_func, flat_data, flat_data)
diff --git a/src/type.jl b/src/type.jl
index 4f19649..8e81fdc 100644
--- a/src/type.jl
+++ b/src/type.jl
@@ -1,4 +1,4 @@
-immutable FlowSample{T}
+struct FlowSample{T}
     data::Dict{String, Vector{T}}
     params::Dict{String, String}
 end
@@ -38,6 +38,5 @@ Base.haskey(f::FlowSample, x) = haskey(f.data, x)
 Base.getindex(f::FlowSample, key) = f.data[key]
 Base.keys(f::FlowSample) = keys(f.data)
 Base.values(f::FlowSample) = values(f.data)
-Base.start(iter::FlowSample) = start(iter.data)
-Base.next(iter::FlowSample, state) = next(iter.data, state)
-Base.done(iter::FlowSample, state) = done(iter.data, state)
+Base.iterate(iter::FlowSample) = Base.iterate(iter.data)
+Base.iterate(iter::FlowSample, state) = Base.iterate(iter.data, state)
diff --git a/src/utils.jl b/src/utils.jl
index 084828a..5594f0f 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -29,8 +29,9 @@ and the state of the iterator.
 function grab_word(iter, state, delimiter::Char)
     word = Char[]
     prev = ' '
-    while !done(iter, state)
-        i, state = next(iter, state)
+    iter_result = iterate(iter, state)
+    while iter_result !== nothing
+        i, state = iter_result
 
         # only add character if the current and previous are both
         # delimiters (i.e. escaped) or neither are
@@ -40,6 +41,7 @@ function grab_word(iter, state, delimiter::Char)
         else
             break
         end
+        iter_result = iterate(iter, state)
     end
     join(word), state
 end
@@ -53,7 +55,7 @@ returned by `parse_text`
 """
 function verify_text(text_mappings::Dict{String, String})
     # get all parameterized keywords $P1N, $P2N, etc
-    is_param = [contains(keyword, "n") for keyword in required_keywords]
+    is_param = [occursin("n", keyword) for keyword in required_keywords]
 
     # verify that all non-parameterized keywords are present in the mapping
     for non_param in required_keywords[.~is_param]
@@ -69,7 +71,7 @@ function verify_text(text_mappings::Dict{String, String})
 
     for params in required_keywords[is_param]
         for i in 1:n_params
-            if !haskey(text_mappings, replace(params, "n", i))
+            if !haskey(text_mappings, replace(params, "n"=>i))
                 error("FCS file is corrupted. It is missing required keyword $non_param in its TEXT section")
             end
         end
diff --git a/test/runtests.jl b/test/runtests.jl
index eaead66..bc0fc3d 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,6 +1,6 @@
 using FCSFiles
 using FileIO
-using Base.Test
+using Test
 
 flowrun = load("testdata/BD-FACS-Aria-II.fcs")
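
A note on the recurring change in this patch: Julia 1.0 removed the `start`/`next`/`done` iteration protocol in favor of a single `iterate` function that returns either `nothing` or an `(element, state)` tuple. The sketch below is not part of the patch; `collect_chars` is a hypothetical helper that only illustrates the loop shape now used in `parse_text`, `grab_word`, and the `FlowSample` iteration methods.

```julia
# Illustration only: the Julia 1.0 `iterate` loop pattern adopted in this patch.
# `collect_chars` is a hypothetical helper, not part of FCSFiles.
function collect_chars(s::AbstractString)
    chars = Char[]
    iter_result = iterate(s)             # (element, state) tuple, or nothing if empty
    while iter_result !== nothing
        c, state = iter_result           # unpack the current element and the state
        push!(chars, c)
        iter_result = iterate(s, state)  # advance; `nothing` signals exhaustion
    end
    return chars
end

collect_chars("FCS3.1")                  # ['F', 'C', 'S', '3', '.', '1']
```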
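Likewise, `read(io, T, n)` no longer exists in Julia 1.0; the replacement used throughout `parse_header`, `parse_text`, and `parse_data` is to allocate an uninitialized buffer and fill it in place with `read!`. A minimal sketch, with an in-memory `IOBuffer` standing in for an FCS file:

```julia
# Illustration only: Julia 1.0 replacement for the old read(io, UInt8, n).
io = IOBuffer("FCS3.1    rest of the header")

raw = Array{UInt8}(undef, 6)   # uninitialized 6-byte buffer
read!(io, raw)                 # fill the buffer in place from the stream
version = String(raw)          # "FCS3.1"; note String takes ownership of the bytes
```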