Merge pull request #26 from yaslab/compatibility

Keep compatibility with version 1

commit c3402699e7
@@ -2,3 +2,5 @@ disabled_rules:
- force_cast
- force_try
- variable_name
included:
- Sources
@@ -0,0 +1,11 @@
//
// CSV.swift
// CSV
//
// Created by Yasuhiro Hatta on 2016/06/11.
// Copyright © 2016 yaslab. All rights reserved.
//

public typealias CSV = CSVReader

extension CSV: Sequence { }
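The new CSV.swift above is the type-level compatibility shim: `CSV` becomes a typealias for `CSVReader`, and the empty `Sequence` conformance lets version-1 call sites keep iterating the reader directly. A minimal sketch of what that preserves for callers, with a made-up sample string:

    import CSV

    // Version-1 spelling still compiles: `CSV` is just CSVReader.
    let csv = try! CSV(string: "id,name\n1,foo\n2,bar", hasHeaderRow: true)
    print(csv.headerRow!)   // ["id", "name"]

    // Sequence conformance (on top of CSVReader's IteratorProtocol) keeps the plain for-loop working.
    for row in csv {
        print(row)          // ["1", "foo"], then ["2", "bar"]
    }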
@ -33,6 +33,9 @@
|
|||
0E7E8D001D0BCDCF0057A1C1 /* CSVReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7E8C9D1D0BC7F10057A1C1 /* CSVReader.swift */; };
|
||||
0E7E8D011D0BCDCF0057A1C1 /* CSVError.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7E8C9E1D0BC7F10057A1C1 /* CSVError.swift */; };
|
||||
0E7E8D021D0BCDCF0057A1C1 /* CSVVersion.h in Headers */ = {isa = PBXBuildFile; fileRef = 0E7E8C9F1D0BC7F10057A1C1 /* CSVVersion.h */; settings = {ATTRIBUTES = (Public, ); }; };
|
||||
0E7F657B1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7F657A1EF6437E00E1E1A0 /* Version1Tests.swift */; };
|
||||
0E7F657C1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7F657A1EF6437E00E1E1A0 /* Version1Tests.swift */; };
|
||||
0E7F657D1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0E7F657A1EF6437E00E1E1A0 /* Version1Tests.swift */; };
|
||||
0EA2AB7C1D183B45003EC967 /* BinaryReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB7B1D183B45003EC967 /* BinaryReader.swift */; };
|
||||
0EA2AB7D1D183B45003EC967 /* BinaryReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB7B1D183B45003EC967 /* BinaryReader.swift */; };
|
||||
0EA2AB7E1D183B45003EC967 /* BinaryReader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB7B1D183B45003EC967 /* BinaryReader.swift */; };
|
||||
|
@ -41,6 +44,10 @@
|
|||
0EA2AB821D183BA9003EC967 /* UnicodeIterator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB801D183BA9003EC967 /* UnicodeIterator.swift */; };
|
||||
0EA2AB831D183BA9003EC967 /* UnicodeIterator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB801D183BA9003EC967 /* UnicodeIterator.swift */; };
|
||||
0EA2AB841D183BA9003EC967 /* UnicodeIterator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EA2AB801D183BA9003EC967 /* UnicodeIterator.swift */; };
|
||||
0EBC9E691EE064A500432A2D /* CSV.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EBC9E681EE064A500432A2D /* CSV.swift */; };
|
||||
0EBC9E6A1EE064A500432A2D /* CSV.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EBC9E681EE064A500432A2D /* CSV.swift */; };
|
||||
0EBC9E6B1EE064A500432A2D /* CSV.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EBC9E681EE064A500432A2D /* CSV.swift */; };
|
||||
0EBC9E6C1EE064A500432A2D /* CSV.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EBC9E681EE064A500432A2D /* CSV.swift */; };
|
||||
0EDF8ED71DDB73520068056A /* CSVTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EDF8ECD1DDB73370068056A /* CSVTests.swift */; };
|
||||
0EDF8ED81DDB73520068056A /* LineBreakTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EDF8ECE1DDB73370068056A /* LineBreakTests.swift */; };
|
||||
0EDF8ED91DDB73520068056A /* ReadmeTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0EDF8ECF1DDB73370068056A /* ReadmeTests.swift */; };
|
||||
|
@ -98,8 +105,10 @@
|
|||
0E7E8CCF1D0BCA2A0057A1C1 /* CSVTests-OSX.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "CSVTests-OSX.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
0E7E8CE81D0BCD0B0057A1C1 /* CSV.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = CSV.framework; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
0E7E8CF11D0BCD0B0057A1C1 /* CSVTests-tvOS.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "CSVTests-tvOS.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
0E7F657A1EF6437E00E1E1A0 /* Version1Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Version1Tests.swift; sourceTree = "<group>"; };
|
||||
0EA2AB7B1D183B45003EC967 /* BinaryReader.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = BinaryReader.swift; sourceTree = "<group>"; };
|
||||
0EA2AB801D183BA9003EC967 /* UnicodeIterator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UnicodeIterator.swift; sourceTree = "<group>"; };
|
||||
0EBC9E681EE064A500432A2D /* CSV.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CSV.swift; sourceTree = SOURCE_ROOT; };
|
||||
0EDF8ECD1DDB73370068056A /* CSVTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CSVTests.swift; sourceTree = "<group>"; };
|
||||
0EDF8ECE1DDB73370068056A /* LineBreakTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LineBreakTests.swift; sourceTree = "<group>"; };
|
||||
0EDF8ECF1DDB73370068056A /* ReadmeTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReadmeTests.swift; sourceTree = "<group>"; };
|
||||
|
@ -191,6 +200,7 @@
|
|||
isa = PBXGroup;
|
||||
children = (
|
||||
0EA2AB7B1D183B45003EC967 /* BinaryReader.swift */,
|
||||
0EBC9E681EE064A500432A2D /* CSV.swift */,
|
||||
0E7E8C9E1D0BC7F10057A1C1 /* CSVError.swift */,
|
||||
0E7E8C9D1D0BC7F10057A1C1 /* CSVReader.swift */,
|
||||
0E7E8C9F1D0BC7F10057A1C1 /* CSVVersion.h */,
|
||||
|
@ -220,6 +230,7 @@
|
|||
0EDF8ECF1DDB73370068056A /* ReadmeTests.swift */,
|
||||
0EDF8ED01DDB73370068056A /* TrimFieldsTests.swift */,
|
||||
0EDF8ED11DDB73370068056A /* UnicodeTests.swift */,
|
||||
0E7F657A1EF6437E00E1E1A0 /* Version1Tests.swift */,
|
||||
);
|
||||
path = CSVTests;
|
||||
sourceTree = "<group>";
|
||||
|
@ -526,6 +537,7 @@
|
|||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
0EBC9E6A1EE064A500432A2D /* CSV.swift in Sources */,
|
||||
0EA2AB821D183BA9003EC967 /* UnicodeIterator.swift in Sources */,
|
||||
0E5402241EDA82220019C3ED /* CSVWriter.swift in Sources */,
|
||||
0E7E8CA11D0BC7F10057A1C1 /* CSVReader.swift in Sources */,
|
||||
|
@ -542,6 +554,7 @@
|
|||
0EDF8EDE1DDB73520068056A /* ReadmeTests.swift in Sources */,
|
||||
0EDF8EDC1DDB73520068056A /* CSVTests.swift in Sources */,
|
||||
0EDF8EDF1DDB73520068056A /* TrimFieldsTests.swift in Sources */,
|
||||
0E7F657C1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */,
|
||||
0EDF8EE01DDB73520068056A /* UnicodeTests.swift in Sources */,
|
||||
0EDF8EDD1DDB73520068056A /* LineBreakTests.swift in Sources */,
|
||||
0E54021C1ED9DDF40019C3ED /* CSVWriterTests.swift in Sources */,
|
||||
|
@ -552,6 +565,7 @@
|
|||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
0EBC9E6C1EE064A500432A2D /* CSV.swift in Sources */,
|
||||
0EA2AB841D183BA9003EC967 /* UnicodeIterator.swift in Sources */,
|
||||
0E5402221EDA82220019C3ED /* CSVWriter.swift in Sources */,
|
||||
0E7E8CBE1D0BC9D70057A1C1 /* CSVReader.swift in Sources */,
|
||||
|
@ -565,6 +579,7 @@
|
|||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
0EBC9E691EE064A500432A2D /* CSV.swift in Sources */,
|
||||
0EA2AB811D183BA9003EC967 /* UnicodeIterator.swift in Sources */,
|
||||
0E5402251EDA82230019C3ED /* CSVWriter.swift in Sources */,
|
||||
0E7E8CE01D0BCA8E0057A1C1 /* CSVReader.swift in Sources */,
|
||||
|
@ -581,6 +596,7 @@
|
|||
0EDF8ED91DDB73520068056A /* ReadmeTests.swift in Sources */,
|
||||
0EDF8ED71DDB73520068056A /* CSVTests.swift in Sources */,
|
||||
0EDF8EDA1DDB73520068056A /* TrimFieldsTests.swift in Sources */,
|
||||
0E7F657B1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */,
|
||||
0EDF8EDB1DDB73520068056A /* UnicodeTests.swift in Sources */,
|
||||
0EDF8ED81DDB73520068056A /* LineBreakTests.swift in Sources */,
|
||||
0E54021B1ED9DDF40019C3ED /* CSVWriterTests.swift in Sources */,
|
||||
|
@ -591,6 +607,7 @@
|
|||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
0EBC9E6B1EE064A500432A2D /* CSV.swift in Sources */,
|
||||
0EA2AB831D183BA9003EC967 /* UnicodeIterator.swift in Sources */,
|
||||
0E5402231EDA82220019C3ED /* CSVWriter.swift in Sources */,
|
||||
0E7E8D001D0BCDCF0057A1C1 /* CSVReader.swift in Sources */,
|
||||
|
@ -607,6 +624,7 @@
|
|||
0EDF8EE31DDB73530068056A /* ReadmeTests.swift in Sources */,
|
||||
0EDF8EE11DDB73530068056A /* CSVTests.swift in Sources */,
|
||||
0EDF8EE41DDB73530068056A /* TrimFieldsTests.swift in Sources */,
|
||||
0E7F657D1EF6437E00E1E1A0 /* Version1Tests.swift in Sources */,
|
||||
0EDF8EE51DDB73530068056A /* UnicodeTests.swift in Sources */,
|
||||
0EDF8EE21DDB73530068056A /* LineBreakTests.swift in Sources */,
|
||||
0E54021D1ED9DDF40019C3ED /* CSVWriterTests.swift in Sources */,
|
||||
|
|
|
@@ -53,8 +53,8 @@ internal class BinaryReader {

internal init(
stream: InputStream,
endian: Endian = .unknown,
closeOnDeinit: Bool = true) throws {
endian: Endian,
closeOnDeinit: Bool) throws {

var endian = endian

@@ -62,7 +62,7 @@ internal class BinaryReader {
stream.open()
}
if stream.streamStatus != .open {
throw CSVError.cannotOpenStream
throw CSVError.cannotOpenFile
}

let readCount = stream.read(tempBuffer, maxLength: tempBufferSize)
@@ -93,7 +93,7 @@ internal class BinaryReader {

private func readStream(_ buffer: UnsafeMutablePointer<UInt8>, maxLength: Int) throws -> Int {
if stream.streamStatus != .open {
throw CSVError.cannotReadStream
throw CSVError.cannotReadFile
}

var i = 0
@@ -115,7 +115,7 @@ internal class BinaryReader {
throw CSVError.streamErrorHasOccurred(error: stream.streamError!)
}
if length != bufferSize {
throw CSVError.cannotReadStream
throw CSVError.cannotReadFile
}
return buffer[0]
}
@@ -169,7 +169,7 @@ extension BinaryReader {
internal class UInt8Iterator: Sequence, IteratorProtocol {

private let reader: BinaryReader
internal var errorHandler: ((Error) -> Void)? = nil
internal var errorHandler: ((Error) -> Void)?

fileprivate init(reader: BinaryReader) {
self.reader = reader
@@ -200,7 +200,7 @@ extension BinaryReader {
internal class UInt16Iterator: Sequence, IteratorProtocol {

private let reader: BinaryReader
internal var errorHandler: ((Error) -> Void)? = nil
internal var errorHandler: ((Error) -> Void)?

fileprivate init(reader: BinaryReader) {
self.reader = reader
@@ -231,7 +231,7 @@ extension BinaryReader {
internal class UInt32Iterator: Sequence, IteratorProtocol {

private let reader: BinaryReader
internal var errorHandler: ((Error) -> Void)? = nil
internal var errorHandler: ((Error) -> Void)?

fileprivate init(reader: BinaryReader) {
self.reader = reader
@@ -10,9 +10,9 @@
public enum CSVError: Error {

/// No overview available.
case cannotOpenStream
case cannotOpenFile
/// No overview available.
case cannotReadStream
case cannotReadFile
/// No overview available.
case cannotWriteStream
/// No overview available.
@@ -20,7 +20,7 @@ public enum CSVError: Error {
/// No overview available.
case unicodeDecoding
/// No overview available.
case cannotReadHeaderRecord
case cannotReadHeaderRow
/// No overview available.
case stringEncodingMismatch
/// No overview available.
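For code that matched on the version-1 error cases, the renames above (`cannotOpenStream` to `cannotOpenFile`, `cannotReadStream` to `cannotReadFile`, `cannotReadHeaderRecord` to `cannotReadHeaderRow`) are the visible change. A hedged sketch of error handling against the renamed cases; the file path and stream setup are illustrative only:

    import Foundation
    import CSV

    do {
        guard let stream = InputStream(fileAtPath: "/tmp/example.csv") else {
            fatalError("no such file")   // InputStream(fileAtPath:) returns nil here
        }
        let csv = try CSVReader(stream: stream, hasHeaderRow: true)
        while let row = csv.next() {
            print(row)
        }
    } catch CSVError.cannotOpenFile {
        print("the stream could not be opened")
    } catch CSVError.cannotReadHeaderRow {
        print("the header row could not be read")
    } catch {
        print("unexpected error: \(error)")
    }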
@ -15,62 +15,61 @@ internal let DQUOTE: UnicodeScalar = "\""
|
|||
internal let DQUOTE_STR: String = "\""
|
||||
internal let DQUOTE2_STR: String = "\"\""
|
||||
|
||||
internal let defaultHasHeaderRecord = false
|
||||
internal let defaultHasHeaderRow = false
|
||||
internal let defaultTrimFields = false
|
||||
internal let defaultDelimiter: UnicodeScalar = ","
|
||||
internal let defaultWhitespaces = CharacterSet.whitespaces
|
||||
internal let defaultNewline: UnicodeScalar = LF
|
||||
|
||||
/// No overview available.
|
||||
public class CSVReader {
|
||||
|
||||
|
||||
/// No overview available.
|
||||
public struct Configuration {
|
||||
|
||||
public var fileInputErrorHandler: ((Error, Int, Int) -> Void)? = nil
|
||||
|
||||
/// `true` if the CSV has a header record, otherwise `false`. Default: `false`.
|
||||
public var hasHeaderRecord: Bool
|
||||
|
||||
/// `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
public var hasHeaderRow: Bool
|
||||
/// No overview available.
|
||||
public var trimFields: Bool
|
||||
/// Default: `","`.
|
||||
public var delimiter: UnicodeScalar
|
||||
/// No overview available.
|
||||
public var whitespaces: CharacterSet
|
||||
|
||||
|
||||
/// No overview available.
|
||||
public init(
|
||||
hasHeaderRecord: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces) {
|
||||
|
||||
self.hasHeaderRecord = hasHeaderRecord
|
||||
internal init(
|
||||
hasHeaderRow: Bool,
|
||||
trimFields: Bool,
|
||||
delimiter: UnicodeScalar,
|
||||
whitespaces: CharacterSet) {
|
||||
|
||||
self.hasHeaderRow = hasHeaderRow
|
||||
self.trimFields = trimFields
|
||||
self.delimiter = delimiter
|
||||
|
||||
|
||||
var whitespaces = whitespaces
|
||||
_ = whitespaces.remove(delimiter)
|
||||
self.whitespaces = whitespaces
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
fileprivate var iterator: AnyIterator<UnicodeScalar>
|
||||
//public let stream: InputStream?
|
||||
public let configuration: Configuration
|
||||
public fileprivate (set) var error: Error?
|
||||
|
||||
fileprivate var back: UnicodeScalar? = nil
|
||||
fileprivate var back: UnicodeScalar?
|
||||
fileprivate var fieldBuffer = String.UnicodeScalarView()
|
||||
|
||||
fileprivate var currentRecordIndex: Int = 0
|
||||
fileprivate var currentRowIndex: Int = 0
|
||||
fileprivate var currentFieldIndex: Int = 0
|
||||
|
||||
/// CSV header record. To set a value for this property,
|
||||
/// you set `true` to `headerRecord` in initializer.
|
||||
public private (set) var headerRecord: [String]? = nil
|
||||
/// CSV header row. To set a value for this property,
|
||||
/// you set `true` to `headerRow` in initializer.
|
||||
public private (set) var headerRow: [String]?
|
||||
|
||||
public fileprivate (set) var currentRow: [String]?
|
||||
|
||||
public fileprivate (set) var currentRecord: [String]? = nil
|
||||
|
||||
internal init<T: IteratorProtocol>(
|
||||
iterator: T,
|
||||
configuration: Configuration
|
||||
|
@ -79,34 +78,42 @@ public class CSVReader {
|
|||
self.iterator = AnyIterator(iterator)
|
||||
self.configuration = configuration
|
||||
|
||||
if configuration.hasHeaderRecord {
|
||||
guard let headerRecord = readRecord() else {
|
||||
throw CSVError.cannotReadHeaderRecord
|
||||
if configuration.hasHeaderRow {
|
||||
guard let headerRow = readRow() else {
|
||||
throw CSVError.cannotReadHeaderRow
|
||||
}
|
||||
self.headerRecord = headerRecord
|
||||
self.headerRow = headerRow
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
extension CSVReader {
|
||||
|
||||
|
||||
/// Create an instance with `InputStream`.
|
||||
///
|
||||
/// - parameter stream: An `InputStream` object. If the stream is not open,
|
||||
/// initializer opens automatically.
|
||||
/// - parameter codecType: A `UnicodeCodec` type for `stream`.
|
||||
/// - parameter config: CSV configuration.
|
||||
/// - parameter hasHeaderRow: `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
/// - parameter delimiter: Default: `","`.
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
configuration: Configuration = Configuration()
|
||||
hasHeaderRow: Bool = defaultHasHeaderRow,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces
|
||||
) throws where T.CodeUnit == UInt8 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: .unknown, closeOnDeinit: true)
|
||||
let input = reader.makeUInt8Iterator()
|
||||
let iterator = UnicodeIterator(input: input, inputEncodingType: codecType)
|
||||
try self.init(iterator: iterator, configuration: configuration)
|
||||
let config = Configuration(hasHeaderRow: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter,
|
||||
whitespaces: whitespaces)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
input.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
iterator.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
}
|
||||
|
@ -117,18 +124,26 @@ extension CSVReader {
|
|||
/// initializer opens automatically.
|
||||
/// - parameter codecType: A `UnicodeCodec` type for `stream`.
|
||||
/// - parameter endian: Endian to use when reading a stream. Default: `.big`.
|
||||
/// - parameter config: CSV configuration.
|
||||
/// - parameter hasHeaderRow: `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
/// - parameter delimiter: Default: `","`.
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
configuration: Configuration = Configuration()
|
||||
hasHeaderRow: Bool = defaultHasHeaderRow,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces
|
||||
) throws where T.CodeUnit == UInt16 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: endian, closeOnDeinit: true)
|
||||
let input = reader.makeUInt16Iterator()
|
||||
let iterator = UnicodeIterator(input: input, inputEncodingType: codecType)
|
||||
try self.init(iterator: iterator, configuration: configuration)
|
||||
let config = Configuration(hasHeaderRow: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter,
|
||||
whitespaces: whitespaces)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
input.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
iterator.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
}
|
||||
|
@ -139,57 +154,86 @@ extension CSVReader {
|
|||
/// initializer opens automatically.
|
||||
/// - parameter codecType: A `UnicodeCodec` type for `stream`.
|
||||
/// - parameter endian: Endian to use when reading a stream. Default: `.big`.
|
||||
/// - parameter config: CSV configuration.
|
||||
/// - parameter hasHeaderRow: `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
/// - parameter delimiter: Default: `","`.
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
configuration: Configuration = Configuration()
|
||||
hasHeaderRow: Bool = defaultHasHeaderRow,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces
|
||||
) throws where T.CodeUnit == UInt32 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: endian, closeOnDeinit: true)
|
||||
let input = reader.makeUInt32Iterator()
|
||||
let iterator = UnicodeIterator(input: input, inputEncodingType: codecType)
|
||||
try self.init(iterator: iterator, configuration: configuration)
|
||||
let config = Configuration(hasHeaderRow: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter,
|
||||
whitespaces: whitespaces)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
input.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
iterator.errorHandler = { [unowned self] in self.errorHandler(error: $0) }
|
||||
}
|
||||
|
||||
|
||||
/// Create an instance with `InputStream`.
|
||||
///
|
||||
/// - parameter stream: An `InputStream` object. If the stream is not open,
|
||||
/// initializer opens automatically.
|
||||
/// - parameter config: CSV configuration.
|
||||
/// - parameter hasHeaderRow: `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
/// - parameter delimiter: Default: `","`.
|
||||
public convenience init(
|
||||
stream: InputStream,
|
||||
configuration: Configuration = Configuration()) throws {
|
||||
|
||||
try self.init(stream: stream, codecType: UTF8.self, configuration: configuration)
|
||||
hasHeaderRow: Bool = defaultHasHeaderRow,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces
|
||||
) throws {
|
||||
|
||||
try self.init(
|
||||
stream: stream,
|
||||
codecType: UTF8.self,
|
||||
hasHeaderRow: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter,
|
||||
whitespaces: whitespaces)
|
||||
}
|
||||
|
||||
|
||||
/// Create an instance with CSV string.
|
||||
///
|
||||
/// - parameter string: An CSV string.
|
||||
/// - parameter config: CSV configuration.
|
||||
/// - parameter hasHeaderRow: `true` if the CSV has a header row, otherwise `false`. Default: `false`.
|
||||
/// - parameter delimiter: Default: `","`.
|
||||
public convenience init(
|
||||
string: String,
|
||||
configuration: Configuration = Configuration()) throws {
|
||||
|
||||
hasHeaderRow: Bool = defaultHasHeaderRow,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter,
|
||||
whitespaces: CharacterSet = defaultWhitespaces
|
||||
) throws {
|
||||
|
||||
let iterator = string.unicodeScalars.makeIterator()
|
||||
try self.init(iterator: iterator, configuration: configuration)
|
||||
let config = Configuration(hasHeaderRow: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter,
|
||||
whitespaces: whitespaces)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
}
|
||||
|
||||
private func errorHandler(error: Error) {
|
||||
configuration.fileInputErrorHandler?(error, currentRecordIndex, currentFieldIndex)
|
||||
//configuration.fileInputErrorHandler?(error, currentRowIndex, currentFieldIndex)
|
||||
self.error = error
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
// MARK: - Parse CSV
|
||||
|
||||
extension CSVReader {
|
||||
|
||||
fileprivate func readRecord() -> [String]? {
|
||||
|
||||
fileprivate func readRow() -> [String]? {
|
||||
currentFieldIndex = 0
|
||||
|
||||
var c = moveNext()
|
||||
|
@ -197,7 +241,7 @@ extension CSVReader {
|
|||
return nil
|
||||
}
|
||||
|
||||
var record = [String]()
|
||||
var row = [String]()
|
||||
var field: String
|
||||
var end: Bool
|
||||
while true {
|
||||
|
@ -221,7 +265,7 @@ extension CSVReader {
|
|||
field = field.trimmingCharacters(in: configuration.whitespaces)
|
||||
}
|
||||
}
|
||||
record.append(field)
|
||||
row.append(field)
|
||||
if end {
|
||||
break
|
||||
}
|
||||
|
@ -231,10 +275,10 @@ extension CSVReader {
|
|||
c = moveNext()
|
||||
}
|
||||
|
||||
currentRecordIndex += 1
|
||||
currentRowIndex += 1
|
||||
|
||||
currentRecord = record
|
||||
return record
|
||||
currentRow = row
|
||||
return row
|
||||
}
|
||||
|
||||
private func readField(quoted: Bool) -> (String, Bool) {
|
||||
|
@ -259,7 +303,7 @@ extension CSVReader {
|
|||
back = cNextNext
|
||||
}
|
||||
}
|
||||
// END RECORD
|
||||
// END ROW
|
||||
return (String(fieldBuffer), true)
|
||||
} else if cNext == configuration.delimiter {
|
||||
// END FIELD
|
||||
|
@ -282,7 +326,7 @@ extension CSVReader {
|
|||
back = cNext
|
||||
}
|
||||
}
|
||||
// END RECORD
|
||||
// END ROW
|
||||
return (String(fieldBuffer), true)
|
||||
} else if c == configuration.delimiter {
|
||||
// END FIELD
|
||||
|
@ -310,161 +354,47 @@ extension CSVReader {
|
|||
}
|
||||
|
||||
extension CSVReader {
|
||||
|
||||
public func enumerateRecords(_ block: (([String], [String]?, inout Bool) throws -> Void)) rethrows {
|
||||
|
||||
public func enumerateRows(_ block: (([String], [String]?, inout Bool) throws -> Void)) throws {
|
||||
var stop = false
|
||||
while let record = readRecord() {
|
||||
try block(record, headerRecord, &stop)
|
||||
while let row = readRow() {
|
||||
try block(row, headerRow, &stop)
|
||||
if stop {
|
||||
break
|
||||
}
|
||||
}
|
||||
if let error = error {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
extension CSVReader: IteratorProtocol {
|
||||
|
||||
|
||||
@discardableResult
|
||||
public func next() -> [String]? {
|
||||
return readRecord()
|
||||
return readRow()
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
extension CSVReader {
|
||||
|
||||
public subscript(key: String) -> String? {
|
||||
guard let header = headerRecord else {
|
||||
fatalError("CSVReader.headerRecord must not be nil")
|
||||
guard let header = headerRow else {
|
||||
fatalError("CSVReader.headerRow must not be nil")
|
||||
}
|
||||
guard let index = header.index(of: key) else {
|
||||
return nil
|
||||
}
|
||||
guard let record = currentRecord else {
|
||||
fatalError("CSVReader.currentRecord must not be nil")
|
||||
guard let row = currentRow else {
|
||||
fatalError("CSVReader.currentRow must not be nil")
|
||||
}
|
||||
if index >= record.count {
|
||||
if index >= row.count {
|
||||
return nil
|
||||
}
|
||||
return record[index]
|
||||
return row[index]
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// MARK: - deprecated
|
||||
|
||||
extension CSVReader {
|
||||
|
||||
/// Unavailable.
|
||||
@available(*, unavailable, message: "Use init(stream:codecType:config:) instead")
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
hasHeaderRow: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter
|
||||
) throws where T.CodeUnit == UInt8 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: .unknown, closeOnDeinit: true)
|
||||
let iterator = UnicodeIterator(
|
||||
input: reader.makeUInt8Iterator(),
|
||||
inputEncodingType: codecType
|
||||
)
|
||||
let config = Configuration(
|
||||
hasHeaderRecord: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter
|
||||
)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
}
|
||||
|
||||
/// Unavailable.
|
||||
@available(*, unavailable, message: "Use init(stream:codecType:endian:config:) instead")
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
hasHeaderRow: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter
|
||||
) throws where T.CodeUnit == UInt16 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: endian, closeOnDeinit: true)
|
||||
let iterator = UnicodeIterator(
|
||||
input: reader.makeUInt16Iterator(),
|
||||
inputEncodingType: codecType
|
||||
)
|
||||
let config = Configuration(
|
||||
hasHeaderRecord: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter
|
||||
)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
}
|
||||
|
||||
/// Unavailable.
|
||||
@available(*, unavailable, message: "Use init(stream:codecType:endian:config:) instead")
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: InputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
hasHeaderRow: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter
|
||||
) throws where T.CodeUnit == UInt32 {
|
||||
|
||||
let reader = try BinaryReader(stream: stream, endian: endian, closeOnDeinit: true)
|
||||
let iterator = UnicodeIterator(
|
||||
input: reader.makeUInt32Iterator(),
|
||||
inputEncodingType: codecType
|
||||
)
|
||||
let config = Configuration(
|
||||
hasHeaderRecord: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter
|
||||
)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
}
|
||||
|
||||
/// Unavailable.
|
||||
@available(*, unavailable, message: "Use init(stream:config:) instead")
|
||||
public convenience init(
|
||||
stream: InputStream,
|
||||
hasHeaderRow: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter) throws {
|
||||
|
||||
let config = Configuration(
|
||||
hasHeaderRecord: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter
|
||||
)
|
||||
try self.init(stream: stream, codecType: UTF8.self, configuration: config)
|
||||
}
|
||||
|
||||
/// Unavailable.
|
||||
@available(*, unavailable, message: "Use init(string:config:) instead")
|
||||
public convenience init(
|
||||
string: String,
|
||||
hasHeaderRow: Bool = defaultHasHeaderRecord,
|
||||
trimFields: Bool = defaultTrimFields,
|
||||
delimiter: UnicodeScalar = defaultDelimiter) throws {
|
||||
|
||||
let iterator = string.unicodeScalars.makeIterator()
|
||||
let config = Configuration(
|
||||
hasHeaderRecord: hasHeaderRow,
|
||||
trimFields: trimFields,
|
||||
delimiter: delimiter
|
||||
)
|
||||
try self.init(iterator: iterator, configuration: config)
|
||||
}
|
||||
|
||||
/// Unavailable
|
||||
// @available(*, unavailable, message: "Use CSV.Row.subscript(String) instead")
|
||||
// public subscript(key: String) -> String? {
|
||||
// // FIXME:
|
||||
// return nil
|
||||
// }
|
||||
|
||||
}
|
||||
|
|
|
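The reader diff above is the bulk of the rename: `headerRecord`, `currentRecord`, `readRecord()` and `enumerateRecords` become `headerRow`, `currentRow`, `readRow()` and `enumerateRows`, the public memberwise `Configuration` initializer becomes internal, and the convenience initializers take `hasHeaderRow`, `trimFields`, `delimiter` and `whitespaces` directly, as in version 1. A hedged sketch of a call site after the change, with a made-up CSV string:

    import CSV

    let csv = try! CSVReader(string: "id,name\n1,foo\n2,bar",
                             hasHeaderRow: true)   // trimFields, delimiter, whitespaces keep their defaults

    print(csv.headerRow!)      // ["id", "name"]

    // enumerateRows replaces enumerateRecords and now rethrows any error
    // collected from the underlying stream instead of swallowing it.
    try! csv.enumerateRows { row, _, _ in
        print(row)             // ["1", "foo"], then ["2", "bar"]
    }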
@ -9,61 +9,71 @@
|
|||
import Foundation
|
||||
|
||||
public class CSVWriter {
|
||||
|
||||
|
||||
public struct Configuration {
|
||||
|
||||
|
||||
public var delimiter: String
|
||||
public var newline: String
|
||||
|
||||
public init(delimiter: String = String(defaultDelimiter), newline: String = String(LF)) {
|
||||
|
||||
internal init(delimiter: String, newline: String) {
|
||||
self.delimiter = delimiter
|
||||
self.newline = newline
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
public let stream: OutputStream
|
||||
public let configuration: Configuration
|
||||
fileprivate let writeScalar: ((UnicodeScalar) throws -> Void)
|
||||
|
||||
fileprivate var isFirstRecord: Bool = true
|
||||
fileprivate var isFirstRow: Bool = true
|
||||
fileprivate var isFirstField: Bool = true
|
||||
|
||||
fileprivate init(
|
||||
stream: OutputStream,
|
||||
configuration: Configuration,
|
||||
writeScalar: @escaping ((UnicodeScalar) throws -> Void)) throws {
|
||||
|
||||
|
||||
self.stream = stream
|
||||
self.configuration = configuration
|
||||
self.writeScalar = writeScalar
|
||||
|
||||
|
||||
if stream.streamStatus == .notOpen {
|
||||
stream.open()
|
||||
}
|
||||
if stream.streamStatus != .open {
|
||||
throw CSVError.cannotOpenStream
|
||||
throw CSVError.cannotOpenFile
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
deinit {
|
||||
if stream.streamStatus == .open {
|
||||
stream.close()
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
extension CSVWriter {
|
||||
|
||||
public convenience init(
|
||||
stream: OutputStream,
|
||||
configuration: Configuration = Configuration()) throws {
|
||||
delimiter: String = String(defaultDelimiter),
|
||||
newline: String = String(defaultNewline)
|
||||
) throws {
|
||||
|
||||
try self.init(stream: stream, codecType: UTF8.self, configuration: configuration)
|
||||
try self.init(stream: stream, codecType: UTF8.self, delimiter: delimiter, newline: newline)
|
||||
}
|
||||
|
||||
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: OutputStream,
|
||||
codecType: T.Type,
|
||||
configuration: Configuration = Configuration()
|
||||
delimiter: String = String(defaultDelimiter),
|
||||
newline: String = String(defaultNewline)
|
||||
) throws where T.CodeUnit == UInt8 {
|
||||
|
||||
try self.init(stream: stream, configuration: configuration) { (scalar: UnicodeScalar) throws in
|
||||
|
||||
let config = Configuration(delimiter: delimiter, newline: newline)
|
||||
try self.init(stream: stream, configuration: config) { (scalar: UnicodeScalar) throws in
|
||||
var error: CSVError? = nil
|
||||
codecType.encode(scalar) { (code: UInt8) in
|
||||
var code = code
|
||||
|
@ -77,20 +87,23 @@ extension CSVWriter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: OutputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
configuration: Configuration = Configuration()
|
||||
delimiter: String = String(defaultDelimiter),
|
||||
newline: String = String(defaultNewline)
|
||||
) throws where T.CodeUnit == UInt16 {
|
||||
|
||||
try self.init(stream: stream, configuration: configuration) { (scalar: UnicodeScalar) throws in
|
||||
|
||||
let config = Configuration(delimiter: delimiter, newline: newline)
|
||||
try self.init(stream: stream, configuration: config) { (scalar: UnicodeScalar) throws in
|
||||
var error: CSVError? = nil
|
||||
codecType.encode(scalar) { (code: UInt16) in
|
||||
var code = (endian == .big) ? code.bigEndian : code.littleEndian
|
||||
withUnsafeBytes(of: &code) { (buffer) -> Void in
|
||||
let count = stream.write(buffer.baseAddress!.assumingMemoryBound(to: UInt8.self), maxLength: buffer.count)
|
||||
let count = stream.write(buffer.baseAddress!.assumingMemoryBound(to: UInt8.self),
|
||||
maxLength: buffer.count)
|
||||
if count != buffer.count {
|
||||
error = CSVError.cannotWriteStream
|
||||
}
|
||||
|
@ -101,20 +114,23 @@ extension CSVWriter {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public convenience init<T: UnicodeCodec>(
|
||||
stream: OutputStream,
|
||||
codecType: T.Type,
|
||||
endian: Endian = .big,
|
||||
configuration: Configuration = Configuration()
|
||||
delimiter: String = String(defaultDelimiter),
|
||||
newline: String = String(defaultNewline)
|
||||
) throws where T.CodeUnit == UInt32 {
|
||||
|
||||
try self.init(stream: stream, configuration: configuration) { (scalar: UnicodeScalar) throws in
|
||||
|
||||
let config = Configuration(delimiter: delimiter, newline: newline)
|
||||
try self.init(stream: stream, configuration: config) { (scalar: UnicodeScalar) throws in
|
||||
var error: CSVError? = nil
|
||||
codecType.encode(scalar) { (code: UInt32) in
|
||||
var code = (endian == .big) ? code.bigEndian : code.littleEndian
|
||||
withUnsafeBytes(of: &code) { (buffer) -> Void in
|
||||
let count = stream.write(buffer.baseAddress!.assumingMemoryBound(to: UInt8.self), maxLength: buffer.count)
|
||||
let count = stream.write(buffer.baseAddress!.assumingMemoryBound(to: UInt8.self),
|
||||
maxLength: buffer.count)
|
||||
if count != buffer.count {
|
||||
error = CSVError.cannotWriteStream
|
||||
}
|
||||
|
@ -130,44 +146,44 @@ extension CSVWriter {
|
|||
|
||||
extension CSVWriter {
|
||||
|
||||
public func beginNewRecord() {
|
||||
public func beginNewRow() {
|
||||
isFirstField = true
|
||||
}
|
||||
|
||||
|
||||
public func write(field value: String, quoted: Bool = false) throws {
|
||||
if isFirstRecord {
|
||||
isFirstRecord = false
|
||||
if isFirstRow {
|
||||
isFirstRow = false
|
||||
} else {
|
||||
if isFirstField {
|
||||
try configuration.newline.unicodeScalars.forEach(writeScalar)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if isFirstField {
|
||||
isFirstField = false
|
||||
} else {
|
||||
try configuration.delimiter.unicodeScalars.forEach(writeScalar)
|
||||
}
|
||||
|
||||
|
||||
var value = value
|
||||
|
||||
if quoted {
|
||||
|
||||
if quoted {
|
||||
value = value.replacingOccurrences(of: DQUOTE_STR, with: DQUOTE2_STR)
|
||||
try writeScalar(DQUOTE)
|
||||
}
|
||||
|
||||
|
||||
try value.unicodeScalars.forEach(writeScalar)
|
||||
|
||||
|
||||
if quoted {
|
||||
try writeScalar(DQUOTE)
|
||||
}
|
||||
}
|
||||
|
||||
public func write(record values: [String], quotedAtIndex: ((Int) -> Bool) = { _ in false }) throws {
|
||||
beginNewRecord()
|
||||
|
||||
public func write(row values: [String], quotedAtIndex: ((Int) -> Bool) = { _ in false }) throws {
|
||||
beginNewRow()
|
||||
for (i, value) in values.enumerated() {
|
||||
try write(field: value, quoted: quotedAtIndex(i))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
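The writer gets the same treatment: `beginNewRecord()` becomes `beginNewRow()`, `write(record:)` becomes `write(row:)`, and `delimiter`/`newline` are plain initializer parameters instead of a public `Configuration`. A hedged sketch of the resulting call pattern, writing to an in-memory stream the way the tests below do:

    import Foundation
    import CSV

    let stream = OutputStream(toMemory: ())
    stream.open()

    let writer = try! CSVWriter(stream: stream, delimiter: ",", newline: "\n")

    writer.beginNewRow()
    try! writer.write(field: "id")
    try! writer.write(field: "name")
    try! writer.write(row: ["1", "foo"])
    try! writer.write(row: ["2", "b\"ar"], quotedAtIndex: { $0 == 1 })  // quote only the second field

    stream.close()
    let nsData = stream.property(forKey: .dataWrittenToMemoryStreamKey) as! NSData
    print(String(data: Data(referencing: nsData), encoding: .utf8)!)
    // id,name
    // 1,foo
    // 2,"b""ar"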
@@ -6,14 +6,14 @@
// Copyright © 2016 yaslab. All rights reserved.
//

/// No overview available.
/// Represents byte order.
public enum Endian {

/// No overview available.
/// Big endian.
case big
/// No overview available.
/// Little endian.
case little
/// No overview available.
/// Multibyte character sets.
case unknown

}
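`Endian` only matters for the multi-byte codecs; the reader and writer initializers take it next to `codecType`. A short sketch of reading big-endian UTF-16 input, mirroring the shape used in Version1Tests below:

    import Foundation
    import CSV

    let data = "a,b,c\n1,2,3".data(using: .utf16BigEndian)!
    let stream = InputStream(data: data)

    let csv = try! CSVReader(stream: stream,
                             codecType: UTF16.self,
                             endian: .big,        // .big is the default for the UTF-16/UTF-32 initializers
                             hasHeaderRow: true)
    print(csv.headerRow!)     // ["a", "b", "c"]
    print(csv.next()!)        // ["1", "2", "3"]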
@@ -14,7 +14,7 @@ internal class UnicodeIterator<

private var input: Input
private var inputEncoding: InputEncoding
internal var errorHandler: ((Error) -> Void)? = nil
internal var errorHandler: ((Error) -> Void)?

internal init(input: Input, inputEncodingType: InputEncoding.Type) {
self.input = input
@@ -26,7 +26,7 @@ class CSVTests: XCTestCase {
("testSubscriptInt", testSubscriptInt),
("testSubscriptString1", testSubscriptString1),
("testSubscriptString2", testSubscriptString2),
("testToArray", testToArray),
("testToArray", testToArray)
//("testToDictionary1", testToDictionary1),
//("testToDictionary2", testToDictionary2)
]
@@ -34,7 +34,7 @@ class CSVTests: XCTestCase {
func testOneLine() {
let csv = "\"abc\",1,2"
var i = 0


for record in AnyIterator(try! CSVReader(string: csv)) {
switch i {
case 0: XCTAssertEqual(record, ["abc", "1", "2"])
@@ -154,7 +154,11 @@ class CSVTests: XCTestCase {

func testCSVState1() {
let it = "あ,い1,\"う\",えお\n,,x,".unicodeScalars.makeIterator()
let csv = try! CSVReader(iterator: it, configuration: CSVReader.Configuration())
let config = CSVReader.Configuration(hasHeaderRow: false,
trimFields: false,
delimiter: ",",
whitespaces: .whitespaces)
let csv = try! CSVReader(iterator: it, configuration: config)

var records = [[String]]()

@@ -178,8 +182,7 @@ class CSVTests: XCTestCase {

func testSubscriptString1() {
let csvString = "key1,key2\nvalue1,value2"
let config = CSVReader.Configuration(hasHeaderRecord: true)
let csv = try! CSVReader(string: csvString, configuration: config)
let csv = try! CSVReader(string: csvString, hasHeaderRow: true)
csv.next()
XCTAssertEqual(csv["key1"], "value1")
XCTAssertEqual(csv["key2"], "value2")
@@ -188,8 +191,7 @@ class CSVTests: XCTestCase {

func testSubscriptString2() {
let csvString = "key1,key2\nvalue1"
let config = CSVReader.Configuration(hasHeaderRecord: true)
let csv = try! CSVReader(string: csvString, configuration: config)
let csv = try! CSVReader(string: csvString, hasHeaderRow: true)
csv.next()
XCTAssertEqual(csv["key1"], "value1")
XCTAssertNil(csv["key2"])
@ -12,18 +12,18 @@ import XCTest
|
|||
import CSV
|
||||
|
||||
extension OutputStream {
|
||||
|
||||
|
||||
var data: Data? {
|
||||
guard let nsData = property(forKey: .dataWrittenToMemoryStreamKey) as? NSData else {
|
||||
return nil
|
||||
}
|
||||
return Data(referencing: nsData)
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
class CSVWriterTests: XCTestCase {
|
||||
|
||||
|
||||
static let allTests = [
|
||||
("testSingleFieldSingleRecord", testSingleFieldSingleRecord),
|
||||
("testSingleFieldMultipleRecord", testSingleFieldMultipleRecord),
|
||||
|
@ -39,18 +39,18 @@ class CSVWriterTests: XCTestCase {
|
|||
("testUTF32BE", testUTF32BE),
|
||||
("testUTF32LE", testUTF32LE)
|
||||
]
|
||||
|
||||
|
||||
let str = "TEST-test-1234-😄😆👨👩👧👦"
|
||||
|
||||
|
||||
/// xxxx
|
||||
func testSingleFieldSingleRecord() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
@ -63,34 +63,34 @@ class CSVWriterTests: XCTestCase {
|
|||
func testSingleFieldMultipleRecord() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1")
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-2")
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1\n\(str)-2")
|
||||
}
|
||||
|
||||
|
||||
/// xxxx,xxxx
|
||||
func testMultipleFieldSingleRecord() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1")
|
||||
try! csv.write(field: str + "-2")
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1,\(str)-2")
|
||||
}
|
||||
|
||||
|
@ -99,88 +99,87 @@ class CSVWriterTests: XCTestCase {
|
|||
func testMultipleFieldMultipleRecord() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1-1")
|
||||
try! csv.write(field: str + "-1-2")
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-2-1")
|
||||
try! csv.write(field: str + "-2-2")
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1-1,\(str)-1-2\n\(str)-2-1,\(str)-2-2")
|
||||
}
|
||||
|
||||
|
||||
/// "xxxx",xxxx
|
||||
func testQuoted() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1", quoted: true)
|
||||
try! csv.write(field: str + "-2") // quoted: false
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\"\(str)-1\",\(str)-2")
|
||||
}
|
||||
|
||||
|
||||
/// xxxx,"xx\nxx"
|
||||
func testQuotedNewline() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1") // quoted: false
|
||||
try! csv.write(field: str + "-\n-2", quoted: true)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1,\"\(str)-\n-2\"")
|
||||
}
|
||||
|
||||
|
||||
/// xxxx,"xx""xx"
|
||||
func testEscapeQuote() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1") // quoted: false
|
||||
try! csv.write(field: str + "-\"-2", quoted: true)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1,\"\(str)-\"\"-2\"")
|
||||
}
|
||||
|
||||
|
||||
/// Test delimiter: "\t"
|
||||
func testDelimiter() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
let config = CSVWriter.Configuration(delimiter: "\t")
|
||||
let csv = try! CSVWriter.init(stream: stream, configuration: config)
|
||||
csv.beginNewRecord()
|
||||
|
||||
let csv = try! CSVWriter.init(stream: stream, delimiter: "\t")
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1")
|
||||
try! csv.write(field: str + "-2")
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1\t\(str)-2")
|
||||
}
|
||||
|
||||
|
@ -188,34 +187,33 @@ class CSVWriterTests: XCTestCase {
|
|||
func testNewline() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
let config = CSVWriter.Configuration(newline: "\r\n")
|
||||
let csv = try! CSVWriter.init(stream: stream, configuration: config)
|
||||
csv.beginNewRecord()
|
||||
|
||||
let csv = try! CSVWriter.init(stream: stream, newline: "\r\n")
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-1")
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str + "-2")
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf8)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, "\(str)-1\r\n\(str)-2")
|
||||
}
|
||||
|
||||
|
||||
/// UTF16 Big Endian
|
||||
func testUTF16BE() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream, codecType: UTF16.self, endian: .big)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf16BigEndian)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, str)
|
||||
}
|
||||
|
||||
|
@ -223,48 +221,48 @@ class CSVWriterTests: XCTestCase {
|
|||
func testUTF16LE() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream, codecType: UTF16.self, endian: .little)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf16LittleEndian)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, str)
|
||||
}
|
||||
|
||||
|
||||
/// UTF32 Big Endian
|
||||
func testUTF32BE() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream, codecType: UTF32.self, endian: .big)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf32BigEndian)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, str)
|
||||
}
|
||||
|
||||
|
||||
/// UTF32 Little Endian
|
||||
func testUTF32LE() {
|
||||
let stream = OutputStream(toMemory: ())
|
||||
stream.open()
|
||||
|
||||
|
||||
let csv = try! CSVWriter(stream: stream, codecType: UTF32.self, endian: .little)
|
||||
csv.beginNewRecord()
|
||||
csv.beginNewRow()
|
||||
try! csv.write(field: str)
|
||||
|
||||
|
||||
stream.close()
|
||||
let data = stream.data!
|
||||
let csvStr = String(data: data, encoding: .utf32LittleEndian)!
|
||||
|
||||
|
||||
XCTAssertEqual(csvStr, str)
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@@ -126,8 +126,8 @@ class LineBreakTests: XCTestCase {
private func parse(csv: String) -> [[String]] {
let reader = try! CSVReader(string: csv)
var records = [[String]]()
reader.enumerateRecords { (record, _, _) in
records.append(record)
try! reader.enumerateRows { (row, _, _) in
records.append(row)
}
return records
}
@@ -22,8 +22,8 @@ class ReadmeTests: XCTestCase {

func testFromCSVString() {
let csv = try! CSVReader(string: "1,foo\n2,bar")
csv.enumerateRecords { (record, _, _) in
print("\(record)")
try! csv.enumerateRows { (row, _, _) in
print("\(row)")
// => ["1", "foo"]
// => ["2", "bar"]
}
@@ -39,14 +39,14 @@ class ReadmeTests: XCTestCase {

func testGettingTheHeaderRow() {
let csvString = "id,name\n1,foo\n2,bar"
let config = CSVReader.Configuration(hasHeaderRecord: true) // It must be true.
let csv = try! CSVReader(string: csvString, configuration: config)
let csv = try! CSVReader(string: csvString,
hasHeaderRow: true) // It must be true.

let headerRow = csv.headerRecord!
let headerRow = csv.headerRow!
print("\(headerRow)") // => ["id", "name"]

csv.enumerateRecords { (record, _, _) in
print("\(record)")
try! csv.enumerateRows { (row, _, _) in
print("\(row)")
// => ["1", "foo"]
// => ["2", "bar"]
}
@@ -56,9 +56,9 @@ class ReadmeTests: XCTestCase {
let csvString = "1,foo"
let csv = try! CSVReader(string: csvString)

csv.enumerateRecords { (record, _, _) in
print("\(record[0])") // => "1"
print("\(record[1])") // => "foo"
try! csv.enumerateRows { (row, _, _) in
print("\(row[0])") // => "1"
print("\(row[1])") // => "foo"
}
}

@ -29,8 +29,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields1() {
|
||||
let csvString = "abc,def,ghi"
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -38,8 +37,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields2() {
|
||||
let csvString = " abc, def, ghi"
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -47,8 +45,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields3() {
|
||||
let csvString = "abc ,def ,ghi "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -56,8 +53,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields4() {
|
||||
let csvString = " abc , def , ghi "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -65,8 +61,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields5() {
|
||||
let csvString = "\"abc\",\"def\",\"ghi\""
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -74,8 +69,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields6() {
|
||||
let csvString = " \"abc\", \"def\", \"ghi\""
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -83,8 +77,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields7() {
|
||||
let csvString = "\"abc\" ,\"def\" ,\"ghi\" "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -92,8 +85,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields8() {
|
||||
let csvString = " \"abc\" , \"def\" , \"ghi\" "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -101,8 +93,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields9() {
|
||||
let csvString = "\" abc \",\" def \",\" ghi \""
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, [" abc ", " def ", " ghi "])
|
||||
}
|
||||
|
@ -110,8 +101,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields10() {
|
||||
let csvString = "\tabc,\t\tdef\t,ghi\t"
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "def", "ghi"])
|
||||
}
|
||||
|
@ -119,8 +109,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields11() {
|
||||
let csvString = " abc \n def "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
|
||||
let record1 = csv.next()!
|
||||
XCTAssertEqual(record1, ["abc"])
|
||||
|
@ -130,8 +119,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields12() {
|
||||
let csvString = " \"abc \" \n \" def\" "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
|
||||
let record1 = csv.next()!
|
||||
XCTAssertEqual(record1, ["abc "])
|
||||
|
@ -141,8 +129,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields13() {
|
||||
let csvString = " abc \t\tdef\t ghi "
|
||||
let config = CSVReader.Configuration(trimFields: true, delimiter: UnicodeScalar("\t")!)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true, delimiter: "\t")
|
||||
for record in AnyIterator(csv) {
|
||||
XCTAssertEqual(record, ["abc", "", "def", "ghi"])
|
||||
}
|
||||
|
@ -150,8 +137,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields14() {
|
||||
let csvString = ""
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
let records = AnyIterator(csv).map { $0 }
|
||||
|
||||
XCTAssertEqual(records.count, 0)
|
||||
|
@ -159,8 +145,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields15() {
|
||||
let csvString = " "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
let records = AnyIterator(csv).map { $0 }
|
||||
|
||||
XCTAssertEqual(records.count, 1)
|
||||
|
@ -169,8 +154,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields16() {
|
||||
let csvString = " , "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
let records = AnyIterator(csv).map { $0 }
|
||||
|
||||
XCTAssertEqual(records.count, 1)
|
||||
|
@ -179,8 +163,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields17() {
|
||||
let csvString = " , \n"
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
let records = AnyIterator(csv).map { $0 }
|
||||
|
||||
XCTAssertEqual(records.count, 1)
|
||||
|
@ -189,8 +172,7 @@ class TrimFieldsTests: XCTestCase {
|
|||
|
||||
func testTrimFields18() {
|
||||
let csvString = " , \n "
|
||||
let config = CSVReader.Configuration(trimFields: true)
|
||||
let csv = try! CSVReader(string: csvString, configuration: config)
|
||||
let csv = try! CSVReader(string: csvString, trimFields: true)
|
||||
let records = AnyIterator(csv).map { $0 }
|
||||
|
||||
XCTAssertEqual(records.count, 2)
|
||||
|
|
|
@@ -113,7 +113,7 @@ class UnicodeTests: XCTestCase {

private func getRecords(csv: CSVReader) -> [[String]] {
var records = [[String]]()
csv.enumerateRecords { (record, _, _) in
try! csv.enumerateRows { (record, _, _) in
records.append(record)
}
return records
@@ -0,0 +1,105 @@
//
// Version1Tests.swift
// CSV
//
// Created by Yasuhiro Hatta on 2017/06/18.
// Copyright © 2017 yaslab. All rights reserved.
//

import Foundation
import XCTest
@testable import CSV

class Version1Tests: XCTestCase {

func testV1() {
let str = "a,b,c\n1,2,3"
let data8 = str.data(using: .utf8)!
let data16 = str.data(using: .utf16BigEndian)!
let data32 = str.data(using: .utf32BigEndian)!

let headerRow = ["a", "b", "c"]
let row = ["1", "2", "3"]

do {
let stream = InputStream(data: data8)
var csv = try CSV(stream: stream,
codecType: UTF8.self,
hasHeaderRow: true,
trimFields: false,
delimiter: ",")
XCTAssertEqual(csv.headerRow!, headerRow)
XCTAssertEqual(csv.next()!, row)
XCTAssertEqual(csv["a"], row[0])
} catch {
fatalError()
}

do {
let stream = InputStream(data: data16)
var csv = try CSV(stream: stream,
codecType: UTF16.self,
endian: .big,
hasHeaderRow: true,
trimFields: false,
delimiter: ",")
XCTAssertEqual(csv.headerRow!, headerRow)
XCTAssertEqual(csv.next()!, row)
XCTAssertEqual(csv["a"], row[0])
} catch {
fatalError()
}

do {
let stream = InputStream(data: data32)
var csv = try CSV(stream: stream,
codecType: UTF32.self,
endian: .big,
hasHeaderRow: true,
trimFields: false,
delimiter: ",")
XCTAssertEqual(csv.headerRow!, headerRow)
XCTAssertEqual(csv.next()!, row)
XCTAssertEqual(csv["a"], row[0])
} catch {
fatalError()
}

do {
let stream = InputStream(data: data8)
var csv = try CSV(stream: stream,
hasHeaderRow: true,
trimFields: false,
delimiter: ",")
XCTAssertEqual(csv.headerRow!, headerRow)
XCTAssertEqual(csv.next()!, row)
XCTAssertEqual(csv["a"], row[0])
} catch {
fatalError()
}

do {
var csv = try CSV(string: str,
hasHeaderRow: true,
trimFields: false,
delimiter: ",")
XCTAssertEqual(csv.headerRow!, headerRow)
XCTAssertEqual(csv.next()!, row)
XCTAssertEqual(csv["a"], row[0])
} catch {
fatalError()
}

_ = CSVError.cannotOpenFile
_ = CSVError.cannotReadFile
_ = CSVError.streamErrorHasOccurred(error: NSError())
_ = CSVError.cannotReadHeaderRow
_ = CSVError.stringEncodingMismatch
_ = CSVError.stringEndianMismatch

_ = Endian.big
_ = Endian.little
_ = Endian.unknown
}

}