From 2a1cd4fda8a4a8e649910d16b4dfa1ce7ae63543 Mon Sep 17 00:00:00 2001 From: chai <215380520@qq.com> Date: Fri, 12 May 2023 09:24:40 +0800 Subject: *misc --- ThirdParty/CsvHelper-master/docs/.nojekyll | 0 ThirdParty/CsvHelper-master/docs/api/index.html | 102 ++ ThirdParty/CsvHelper-master/docs/browserconfig.xml | 11 + .../CsvHelper-master/docs/change-log/index.html | 1808 ++++++++++++++++++++ .../examples/configuration/attributes/index.html | 472 +++++ .../class-maps/auto-mapping/index.html | 462 +++++ .../class-maps/constant-value/index.html | 464 +++++ .../class-maps/ignoring-properties/index.html | 463 +++++ .../examples/configuration/class-maps/index.html | 489 ++++++ .../class-maps/inline-type-conversion/index.html | 515 ++++++ .../mapping-by-alternate-names/index.html | 462 +++++ .../class-maps/mapping-by-index/index.html | 465 +++++ .../class-maps/mapping-by-name/index.html | 462 +++++ .../class-maps/mapping-duplicate-names/index.html | 464 +++++ .../class-maps/mapping-properties/index.html | 462 +++++ .../class-maps/optional-maps/index.html | 464 +++++ .../class-maps/type-conversion/index.html | 482 ++++++ .../configuration/class-maps/validation/index.html | 463 +++++ .../docs/examples/configuration/index.html | 450 +++++ .../docs/examples/csvdatareader/index.html | 460 +++++ .../CsvHelper-master/docs/examples/index.html | 465 +++++ .../docs/examples/prerequisites/index.html | 454 +++++ .../reading-and-writing-files/index.html | 480 ++++++ .../docs/examples/prerequisites/streams/index.html | 453 +++++ .../prerequisites/using-and-dispose/index.html | 444 +++++ .../reading/enumerate-class-records/index.html | 457 +++++ .../reading/get-anonymous-type-records/index.html | 451 +++++ .../examples/reading/get-class-records/index.html | 452 +++++ .../reading/get-dynamic-records/index.html | 446 +++++ .../docs/examples/reading/index.html | 469 +++++ .../examples/reading/reading-by-hand/index.html | 463 +++++ .../reading/reading-multiple-data-sets/index.html | 514 ++++++ .../reading-multiple-record-types/index.html | 497 ++++++ .../custom-type-converter/index.html | 480 ++++++ .../docs/examples/type-conversion/index.html | 596 +++++++ .../type-converter-options/index.html | 450 +++++ .../appending-to-an-existing-file/index.html | 476 ++++++ .../docs/examples/writing/index.html | 483 ++++++ .../write-anonymous-type-objects/index.html | 450 +++++ .../writing/write-class-objects/index.html | 456 +++++ .../writing/write-dynamic-objects/index.html | 454 +++++ .../docs/favicons/android-icon-144x144.png | Bin 0 -> 2931 bytes .../docs/favicons/android-icon-192x192.png | Bin 0 -> 2944 bytes .../docs/favicons/android-icon-36x36.png | Bin 0 -> 1243 bytes .../docs/favicons/android-icon-48x48.png | Bin 0 -> 1425 bytes .../docs/favicons/android-icon-72x72.png | Bin 0 -> 1784 bytes .../docs/favicons/android-icon-96x96.png | Bin 0 -> 2116 bytes .../docs/favicons/apple-icon-114x114.png | Bin 0 -> 2447 bytes .../docs/favicons/apple-icon-120x120.png | Bin 0 -> 2498 bytes .../docs/favicons/apple-icon-144x144.png | Bin 0 -> 2931 bytes .../docs/favicons/apple-icon-152x152.png | Bin 0 -> 3040 bytes .../docs/favicons/apple-icon-180x180.png | Bin 0 -> 3523 bytes .../docs/favicons/apple-icon-57x57.png | Bin 0 -> 1525 bytes .../docs/favicons/apple-icon-60x60.png | Bin 0 -> 1577 bytes .../docs/favicons/apple-icon-72x72.png | Bin 0 -> 1784 bytes .../docs/favicons/apple-icon-76x76.png | Bin 0 -> 1813 bytes .../docs/favicons/apple-icon-precomposed.png | Bin 0 -> 3516 bytes .../CsvHelper-master/docs/favicons/apple-icon.png 
| Bin 0 -> 3516 bytes .../docs/favicons/favicon-16x16.png | Bin 0 -> 974 bytes .../docs/favicons/favicon-32x32.png | Bin 0 -> 1208 bytes .../docs/favicons/favicon-96x96.png | Bin 0 -> 2116 bytes .../CsvHelper-master/docs/favicons/favicon.ico | Bin 0 -> 1150 bytes .../docs/favicons/ms-icon-144x144.png | Bin 0 -> 2931 bytes .../docs/favicons/ms-icon-150x150.png | Bin 0 -> 3011 bytes .../docs/favicons/ms-icon-310x310.png | Bin 0 -> 7118 bytes .../docs/favicons/ms-icon-70x70.png | Bin 0 -> 1698 bytes .../docs/getting-started/index.html | 372 ++++ .../docs/google6ad86f23ff698b61.html | 99 ++ ThirdParty/CsvHelper-master/docs/images/logo.svg | 89 + ThirdParty/CsvHelper-master/docs/index.html | 427 +++++ ThirdParty/CsvHelper-master/docs/manifest.json | 41 + .../CsvHelper-master/docs/migration/index.html | 134 ++ .../CsvHelper-master/docs/migration/v10/index.html | 102 ++ .../CsvHelper-master/docs/migration/v11/index.html | 102 ++ .../CsvHelper-master/docs/migration/v12/index.html | 102 ++ .../CsvHelper-master/docs/migration/v13/index.html | 102 ++ .../CsvHelper-master/docs/migration/v14/index.html | 102 ++ .../CsvHelper-master/docs/migration/v15/index.html | 102 ++ .../CsvHelper-master/docs/migration/v16/index.html | 102 ++ .../CsvHelper-master/docs/migration/v17/index.html | 102 ++ .../CsvHelper-master/docs/migration/v18/index.html | 102 ++ .../CsvHelper-master/docs/migration/v19/index.html | 102 ++ .../CsvHelper-master/docs/migration/v2/index.html | 102 ++ .../CsvHelper-master/docs/migration/v20/index.html | 344 ++++ .../CsvHelper-master/docs/migration/v21/index.html | 116 ++ .../CsvHelper-master/docs/migration/v22/index.html | 138 ++ .../CsvHelper-master/docs/migration/v23/index.html | 295 ++++ .../CsvHelper-master/docs/migration/v24/index.html | 116 ++ .../CsvHelper-master/docs/migration/v25/index.html | 255 +++ .../CsvHelper-master/docs/migration/v26/index.html | 267 +++ .../CsvHelper-master/docs/migration/v27/index.html | 131 ++ .../CsvHelper-master/docs/migration/v28/index.html | 131 ++ .../CsvHelper-master/docs/migration/v29/index.html | 122 ++ .../CsvHelper-master/docs/migration/v3/index.html | 102 ++ .../CsvHelper-master/docs/migration/v30/index.html | 121 ++ .../CsvHelper-master/docs/migration/v4/index.html | 102 ++ .../CsvHelper-master/docs/migration/v5/index.html | 102 ++ .../CsvHelper-master/docs/migration/v6/index.html | 102 ++ .../CsvHelper-master/docs/migration/v7/index.html | 102 ++ .../CsvHelper-master/docs/migration/v8/index.html | 102 ++ .../CsvHelper-master/docs/migration/v9/index.html | 102 ++ ThirdParty/CsvHelper-master/docs/robots.txt | 1 + ThirdParty/CsvHelper-master/docs/scripts/header.js | 6 + .../CsvHelper-master/docs/scripts/sidebar.js | 41 + ThirdParty/CsvHelper-master/docs/sitemap.xml | 1 + ThirdParty/CsvHelper-master/docs/styles/base.css | 3 + .../CsvHelper-master/docs/styles/content.css | 1 + ThirdParty/CsvHelper-master/docs/styles/header.css | 1 + ThirdParty/CsvHelper-master/docs/styles/index.css | 21 + .../CsvHelper-master/docs/styles/sidebar.css | 13 + 110 files changed, 24432 insertions(+) create mode 100644 ThirdParty/CsvHelper-master/docs/.nojekyll create mode 100644 ThirdParty/CsvHelper-master/docs/api/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/browserconfig.xml create mode 100644 ThirdParty/CsvHelper-master/docs/change-log/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/attributes/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/auto-mapping/index.html 
create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/constant-value/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/ignoring-properties/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/inline-type-conversion/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/mapping-by-alternate-names/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/mapping-by-index/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/mapping-by-name/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/mapping-duplicate-names/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/mapping-properties/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/optional-maps/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/type-conversion/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/class-maps/validation/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/configuration/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/csvdatareader/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/prerequisites/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/prerequisites/reading-and-writing-files/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/prerequisites/streams/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/prerequisites/using-and-dispose/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/enumerate-class-records/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/get-anonymous-type-records/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/get-class-records/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/get-dynamic-records/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/reading-by-hand/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/reading-multiple-data-sets/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/reading/reading-multiple-record-types/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/type-conversion/custom-type-converter/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/type-conversion/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/type-conversion/type-converter-options/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/writing/appending-to-an-existing-file/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/writing/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/writing/write-anonymous-type-objects/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/examples/writing/write-class-objects/index.html create mode 100644 
ThirdParty/CsvHelper-master/docs/examples/writing/write-dynamic-objects/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-144x144.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-192x192.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-36x36.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-48x48.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-72x72.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/android-icon-96x96.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-114x114.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-120x120.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-144x144.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-152x152.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-180x180.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-57x57.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-60x60.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-72x72.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-76x76.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon-precomposed.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/apple-icon.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/favicon-16x16.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/favicon-32x32.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/favicon-96x96.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/favicon.ico create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/ms-icon-144x144.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/ms-icon-150x150.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/ms-icon-310x310.png create mode 100644 ThirdParty/CsvHelper-master/docs/favicons/ms-icon-70x70.png create mode 100644 ThirdParty/CsvHelper-master/docs/getting-started/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/google6ad86f23ff698b61.html create mode 100644 ThirdParty/CsvHelper-master/docs/images/logo.svg create mode 100644 ThirdParty/CsvHelper-master/docs/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/manifest.json create mode 100644 ThirdParty/CsvHelper-master/docs/migration/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v10/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v11/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v12/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v13/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v14/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v15/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v16/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v17/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v18/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v19/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v2/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v20/index.html create mode 100644 
ThirdParty/CsvHelper-master/docs/migration/v21/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v22/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v23/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v24/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v25/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v26/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v27/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v28/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v29/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v3/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v30/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v4/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v5/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v6/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v7/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v8/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/migration/v9/index.html create mode 100644 ThirdParty/CsvHelper-master/docs/robots.txt create mode 100644 ThirdParty/CsvHelper-master/docs/scripts/header.js create mode 100644 ThirdParty/CsvHelper-master/docs/scripts/sidebar.js create mode 100644 ThirdParty/CsvHelper-master/docs/sitemap.xml create mode 100644 ThirdParty/CsvHelper-master/docs/styles/base.css create mode 100644 ThirdParty/CsvHelper-master/docs/styles/content.css create mode 100644 ThirdParty/CsvHelper-master/docs/styles/header.css create mode 100644 ThirdParty/CsvHelper-master/docs/styles/index.css create mode 100644 ThirdParty/CsvHelper-master/docs/styles/sidebar.css (limited to 'ThirdParty/CsvHelper-master/docs') diff --git a/ThirdParty/CsvHelper-master/docs/.nojekyll b/ThirdParty/CsvHelper-master/docs/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/ThirdParty/CsvHelper-master/docs/api/index.html b/ThirdParty/CsvHelper-master/docs/api/index.html new file mode 100644 index 0000000..6e555f3 --- /dev/null +++ b/ThirdParty/CsvHelper-master/docs/api/index.html @@ -0,0 +1,102 @@ + + +
+CsvWriter
not passing leaveOpen
parameter to other constructor call.Field
and RawRecord
to BadDataException
.IWriterConfiguration
into CsvWriter
constructor instead of CsvConfiguration
.MemberName
to the type converter exception message.MaxFieldSize
configuration. If max size is set and the size is reached, MaxFieldSizeException
is thrown.BufferSizeAttribute
CacheFieldsAttribute
CommentAttribute
CountBytesAttribute
DelimiterAttribute
DetectColumnCountChangesAttribute
DetectDelimiterAttribute
DetectDelimiterValueAttribute
EncodingAttribute
EscapeAttribute
ExceptionMessagesContainRawDataAttribute
HasHeaderRecordAttribute
IgnoreBlankLinesAttribute
IgnoreReferencesAttribute
IncludePrivateMembersAttribute
InjectionCharactersAttribute
InjectionEscapeCharacterAttribute
InjectionOptionsAttribute
LineBreakInQuotedFieldIsBadDataAttribute
MaxFieldSizeAttribute
ModeAttribute
NewLineAttribute
ProcessFieldAttribute
QuoteAttribute
TrimOptionsAttribute
UseNewObjectForNullReferenceMembersAttribute
WhiteSpaceCharsAttribute
params object[] constructorArgs
to TypeConverterAttribute
.Validate
mapping.IReaderRow
to ValidateArgs
.Default
and Constant
type constraints to IsAssignableFrom
.null
check in WriteRecords
.string field
and string rawRecord
to BadDataException
constructor.double MaxFieldSize { get; }
to IParserConfiguration
.bool LeaveOpen { get; }
to IWriterConfiguration
.bool LeaveOpen { get; }
to IParserConfiguration
.IReaderRow row
to ValidateArgs
constructor.TypeConverter
factories. This allows for the ability to handle many types at once.
+Code that manually handled nullable types, enums, and collections was changed into factories.ConfigurationFunction.GetDelimiter
.CsvConfiguration.SanitizeInjection
flag to CsvConfiguration.InjectionOptions
enum.
+\t
and \r
to CsvConfiguration.InjectionEscapeCharacter
.CsvConfiguration.InjectionEscapeCharacter
from \t
to '
.CsvDataReader.GetDataTypeName
will use types when the schema table is overridden.CsvConfiguration.Validate
exception messages.ConfigureAwait(false)
added to async calls.CsvReader.TryGetField
throwing an exception when multiple headers are read.MemberMap.Validate
passing the wrong type into the expression call.MemberMap<T>.Convert
not working with static
methods.DateTimeConverter
and DateTimeOffsetConverter
throwing an exception other than TypeConverterException
on failure.MissingFieldFound
was not being called if IgnoreBlankLines
was off.CsvConfiguration.SanitizeForInjection
-> CsvConfiguration.InjectionOptions
bool IWriterConfiguration.SanitizeForInjection
-> InjectionOptions IWriterConfiguration.InjectionOptions
CsvConfiguration.InjectionEscapeCharacter
changed from \t
to '
.\t
and \r
to CsvConfiguration.InjectionCharacters
.GetDelimiter IParserConfiguration.GetDelimiter
delegate.CultureInfo.TextInfo.ListSeparator
if it's on every line.CsvParser.Record[]
so multiple calls won't regenerate it.ShouldSkipRecord
is null
by default and won't get called if not set.ShouldSkipRecordArgs
holds IReaderRow
now instead of string[]
.CsvParser
constructor to take in IParserConfiguration
instead of CsvConfiguration
.CsvReader
constructor to take in IReaderConfiguration
instead of CsvConfiguration
.MemberMapData
passed to them when converting the item.BadDataFound
was being called multiple times for the same field.ShouldSkipRecordArgs
holds IReaderRow
now instead of string[]
.ConfigurationFunctions.ShouldSkipRecord
as the default is now null
.IParserConfiguration.Validate
.DateOnly
and TimeOnly
types.\t
wasn't removed and just an exception was being thrown.IParserConfiguration.DetectDelimiter
.IParserConfiguration.DetectDelimiterValues
.IWriter.WriteRecordsAsync<T>(IAsyncEnumerable<T> records, CancellationToken cancellationToken = default)
.\t
from CsvConfiguration.WhiteSpaceChars
as a default.ExceptionMessagesContainRawData
that defaults to true.init
properties. These were causing people too many problems.bool IParserConfiguration.ExceptionMessagesContainRawData { get; }
.bool IWriterConfiguration.ExceptionMessagesContainRawData { get; }
.init
removed and now have constructors with parameters.
+init
.CancellationToken
to reading and writing async methods.ShouldQuote
not having the correct field type when writing records instead of fields.CharCount
and ByteCount
when trimming.void IWriterRow.WriteConvertedField(string field)
-> void IWriterRow.WriteConvertedField(string field, Type fieldType)
void CsvWriter.WriteConvertedField(string field)
-> void CsvWriter.WriteConvertedField(string field, Type fieldType)
init
properties to set
. Once VB.NET implements init
, it can change back.CsvWriter.WriteBuffer
protected so sub classes can write fields.CsvWriter.Flush
and CsvWriter.FlushAsync
will now flush the underlying TextWriter
.delegate
methods to accept an args struct
instead of parameters. This makes it easier to understand what parameters are passed in, and allows for additional parameters to be added later without a breaking change.CsvConfiguration
constructor. The properties are now settable, so this isn't needed for VB.NET.[EnumIgnoreAttribute]
.EnumIgnoreCase
value not making it to the converter when reading.ParserMode
to CsvMode
and added the modes to CsvWriter
.Type fieldType
parameter to ShouldQuote
delegate.TypeConverterOptions.EnumIgnoreCase
(default is false). Allows EnumConverter
to ignore case when matching enum names, values, or NameAttribute
.EnumConverter
when duplicate names or values appeared in an Enum.ParserMode
-> CsvMode
IParserConfiguration.ProcessFieldBufferSize
.IWriterConfiguration.Mode
.ShouldQuote(string, IWriterRow)
-> ShouldQuote(string, Type, IWriterRow)
.EnumConverter
was changed to case sensitive by default.Quote
, read until a Delimiter
or NewLine
is found.Quote
is found that isn't preceded by an Escape
, read until a Delimiter or NewLine is found.
will be applied before these rules.init
properties on CsvConfiguration by adding a constructor that takes in all properties as optional named arguments.No changes.
+CsvConfiguration.NewLine
not defaulting to '\r\n'.CsvWriter
not keeping track of Row
and Index
.CsvConfiguration.NewLine
changed to a string
. You can now read and write any string you like for a line ending. This defaults to Environment.NewLine
. When reading, if the value is not explicitly set \r\n
, \r
, or \n
will still be used.init
.Environment.NewLine
.char? CsvConfiguration.NewLine
changed to string CsvConfiguration.NewLine
.record
to eliminate threading issues.NewLine
.Caches
enum.ReadingContext
and WritingContext
were merged into a single CsvContext
. Anywhere that used either was changed to CsvContext
.Func
s and Action
s now have their own delegate
.ConvertUsing
renamed to Convert
.ShouldQuote
now takes in IWriterRow
instead of CsvContext
.CsvConfiguration
changed from a class
to a record
.CsvConfiguration
properties changed to read only get; init;
.CsvConfiguration.NewLine
changed to char?
.CsvConfiguration.NewLineString
removed.CsvConfiguration.RegisterClassMap
moved to CsvContext
.CsvConfiguration.UnregisterClassMap
moved to CsvContext
.CsvConfiguration.AutoMap
moved to CsvContext
.IParserConfiguration
setters removed.bool IParserConfiguration.CacheFields
added.bool IParserConfiguration.LeaveOpen
added.char? IParserConfiguration.NewLine
added.ParserMode IParserConfiguration.Mode
added.IParserConfiguration.IgnoreQuotes
removed.char[] IParserConfiguration.WhiteSpaceChars
added.IReaderConfiguration
setters removed.IReaderConfiguration.TypeConverterOptionsCache
removed.IReaderConfiguration.TypeConverterCache
removed.IReaderConfiguration.Maps
removed.IReaderConfiguration.RegisterClassMap
removed.IReaderConfiguration.UnregisterClassMap
removed.IReaderConfiguration.AutoMap
removed.ISerializerConfiguration
removed and properties added to IWriterConfiguration
.IWriterConfiguration
setters removed.IWriterConfiguration.QuoteString
removed.IWriterConfiguration.TypeConverterCache
removed.IWriterConfiguration.MemberTypes
removed.IWriterConfiguration.Maps
removed.IWriterConfiguration.RegisterClassMap
removed.IWriterConfiguration.UnregisterClassMap
removed.IWriterConfiguration.AutoMap
removed.MemberMap.Optional
added.MemberMap<TClass, TMember>.ConvertUsing
renamed to Convert
.CsvFieldReader
removed.CsvParser.Read
returns boolean
instead of string[]
.CsvParser
constructors that take in a FieldReader
removed.CsvParser[int index]
added to retrieve fields after a Read
.CsvSerializer
removed.IFieldReader
removed.IParser.ByteCount
added.IParser.CharCount
added.IParser.Count
added.IParser[int index]
added.IParser.Record
added.IParser.RawRecord
added.IParser.Row
added.IParser.RawRow
added.IParser.Read
returns bool
instead of string[]
.IParser.ReadAsync
returns bool
instead of string[]
.IReader.Parser
removed.int IReaderRow.ColumnCount
added.int IReaderRow.CurrentIndex
added.string[] IReaderRow.HeaderRecord
added.IParser IReaderRow.Parser
added.ISerializer
removed.string[] IWriterRow.HeaderRecord
added.int IWriterRow.Row
added.int IWriterRow.Index
added.RecordBuilder
removed.IParameterMapper
to BooleanFalseValuesAttribute
, BooleanTrueValuesAttribute
, ConstantAttribute
, CultureInfoAttribute
, DateTimeStylesAttribute
, DefaultAttribute
, FormatAttribute
, HeaderPrefixAttribute
, IgnoreAttribute
, NameIndexAttribute
, NullValuesAttribute
, NumberStylesAttribute
, OptionalAttribute
, and TypeConverterAttribute
.MapTypeConverterOption
to MemberMapTypeConverterOptions
.TypeConverterOptions.NumberStyle
to TypeConverterOptions.NumberStyles
.ReflectionHelper.CreateInstance<T>
.ReflectionHelper.CreateInstance
.ReflectionHelper.CreateInstanceWithoutContractResolver
.NameAttribute
added interface IParameterMapper
.IndexAttribute
added interface IParameterMapper
.Action<bool, string[], int, ReadingContext> IReaderConfiguration.HeaderValidated
-> Action<InvalidHeader[], ReadingContext> IReaderConfiguration.HeaderValidated
Action<bool, string[], int, ReadingContext> CsvConfiguration.HeaderValidated
-> Action<InvalidHeader[], ReadingContext> CsvConfiguration.HeaderValidated
ConfigurationFunctions.HeaderValidated
signature changed from (bool isValid, string[] headerNames, int headerNameIndex, ReadingContext context)
to (InvalidHeader[] invalidHeaders, ReadingContext context)
CsvReader.ValidateHeader(ClassMap map)
-> CsvReader.ValidateHeader(ClassMap map, List<InvalidHeader> invalidHeaders)
HeaderValidationException.HeaderNames
.HeaderValidationException.HeaderNameIndex
.InvalidHeader[] HeaderValidationException.InvalidHeaders
.[Name]
attribute on enum values.Enum
that will be a default for all enum types.ObjectDisposedException
when CsvReader
is disposed. A message hint was added to help the user understand what went wrong.Func<ReadingContext, int, string> IReaderConfiguration.GetDynamicPropertyName
.Func<ReadingContext, int, string> CsvConfiguration.GetDynamicPropertyName
.IgnoreAttribute
to ignore the whole property tree if put on a reference property when auto mapped.CsvDataReader.FieldCount
was throwing an exception if there were no records.CsvDataReader.GetOrdinal
issue where it wasn't doing a case-insensitive match after a failed case-sensitive match. Run values through PrepareHeaderForMatch
.null
to WriteField
didn't output a field.ReflectionHelper
caching not always unique.DecimalConverter
and DoubleConverter
to match MS's recommendations.DataReader.GetValues
not working when column and rows have different count.System.Threading.Tasks.Extensions
to 4.5.2 due to loading error of Microsoft.Bcl.AsyncInterfaces
.IMemberReferenceMapper
to IgnoreAttribute
.IAsyncDispose
on writing classes.<PackageReference Include="System.Threading.Tasks.Extensions" Version="4.5.3" />
to net45
.<PackageReference Include="System.Threading.Tasks.Extensions" Version="4.5.3" />
to net47
.<PackageReference Include="System.Threading.Tasks.Extensions" Version="4.5.3" />
to netstandard2.0
.IWriter
added interface IAsyncDisposable
for net47
and netstandard2.1
.ISerializer
added interface IAsyncDisposable
for net47
and netstandard2.1
.WritingContext
added interface IAsyncDisposable
for net47
and netstandard2.1
.CsvWriter
added methods public async ValueTask DisposeAsync()
and protected virtual async ValueTask DisposeAsync(bool disposing)
for net47
and netstandard
.CsvSerializer
added methods public async ValueTask DisposeAsync()
and protected virtual async ValueTask DisposeAsync(bool disposing)
for net47
and netstandard
.WritingContext
added methods public async ValueTask DisposeAsync()
and protected virtual async ValueTask DisposeAsync(bool disposing)
for net47
and netstandard
.netstandard2.1
build.CRLF
, CR
, LF
, or Environment.NewLine
.Configuration
to CsvConfiguration
to avoid namespace conflicts.GetRecordsAsync
and WriteRecordsAsync
.<PackageReference Include="System.Reflection.TypeExtensions" Version="4.4.0" />
from netstandard2.0
.<PackageReference Include="System.Reflection.TypeExtensions" Version="4.4.0" />
from netstandard2.1
.<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" Version="1.1.0" />
to net47
.<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" Version="1.1.0" />
to netstandard2.0
.ClassMap.AutoMap()
-> ClassMap.AutoMap(CultureInfo)
CsvParser.CsvParser(TextReader)
-> CsvParser.CsvParser(TextReader, CultureInfo)
CsvParser.CsvParser(TextReader, bool)
-> CsvParser.CsvParser(TextReader, CultureInfo, bool)
CsvReader.CsvReader(TextReader)
-> CsvReader.CsvReader(TextReader, CultureInfo)
CsvReader.CsvReader(TextReader, bool)
-> CsvReader.CsvReader(TextReader, CultureInfo, bool)
CsvSerializer.CsvSerializer(TextWriter)
-> CsvSerializer.CsvSerializer(TextWriter, CultureInfo)
CsvSerializer.CsvSerializer(TextWriter, bool)
-> CsvSerializer.CsvSerializer(TextWriter, CultureInfo, bool)
CsvWriter.CsvWriter(TextWriter)
-> CsvWriter.CsvWriter(TextWriter, CultureInfo)
CsvWriter.CsvWriter(TextWriter, bool)
-> CsvWriter.CsvWriter(TextWriter, CultureInfo, bool)
Factory.CreateParser(TextReader)
-> Factory.CreateParser(TextReader, CultureInfo)
Factory.CreateReader(TextReader)
-> Factory.CreateReader(TextReader, CultureInfo)
Factory.CreateWriter(TextWriter)
-> Factory.CreateWriter(TextWriter, CultureInfo)
IFactory.CreateParser(TextReader)
-> IFactory.CreateParser(TextReader, CultureInfo)
IFactory.CreateReader(TextReader)
-> IFactory.CreateReader(TextReader, CultureInfo)
IFactory.CreateWriter(TextWriter)
-> IFactory.CreateWriter(TextWriter, CultureInfo)
ISerializerConfiguration.NewLine
.ISerializerConfiguration.NewLineString
.Configuration.NewLine
.Configuration.NewLineString
.Configuration.Configuration()
parameterless constructor.IMemberMapper
or IMemberReferenceMapper
to be loaded. All existing attributes added these and implemented the interface.Configuration
to CsvConfiguration
.IAsyncEnumerable<T> CsvReader.GetRecordsAsync<T>()
IAsyncEnumerable<T> CsvReader.GetRecordsAsync<T>(T anonymousTypeDefinition)
IAsyncEnumerable<object> CsvReader.GetRecordsAsync(Type type)
IAsyncEnumerable<T> CsvReader.EnumerateRecordsAsync<T>(T record)
Task CsvWriter.WriteRecordsAsync(IEnumerable records)
Task CsvWriter.WriteRecordsAsync<T>(IEnumerable<T> records)
IAsyncEnumerable<T> IReader.GetRecordsAsync<T>()
IAsyncEnumerable<T> IReader.GetRecordsAsync<T>(T anonymousTypeDefinition)
IAsyncEnumerable<object> IReader.GetRecordsAsync(Type type)
IAsyncEnumerable<T> IReader.EnumerateRecordsAsync<T>(T record)
Task IWriter.WriteRecordsAsync(IEnumerable records)
Task IWriter.WriteRecordsAsync<T>(IEnumerable<T> records)
RawRecord
adding spaces if TrimOptions.Trim
is used.Type
will throw exception by default.CsvReader.ReadAsync
wasn't behaving the same as CsvReader.Read
.Configuration
to pass in the CultureInfo
. When passing a culture in, the Delimiter
will be set to CultureInfo.TextInfo.ListSeparator
.IComparer<string> IWriterConfiguration.DynamicPropertySort
.IComparer<string> Configuration.DynamicPropertySort
.QuoteAllFields
, QuoteNoFields
, QuoteRequiredChars
, and BuildREquiredQuoteChars
in favor of ShouldQuote
function.IWriterConfiguration.QuoteAllFields
.IWriterConfiguration.QuoteNoFields
.IWriterConfiguration.QuoteRequiredChars
.IWriterConfiguration.BuildRequiredQuoteChars
.Configuration.QuoteAllFields
.Configuration.QuoteNoFields
.Configuration.QuoteRequiredChars
.Configuration.BuildRequiredQuoteChars
.Func<string, WritingContext, bool> IWriterConfiguration.ShouldQuote
.Func<string, WritingContext, bool> Configuration.ShouldQuote
.NamedIndexCache
type from Tuple<string, int>
to (string, int)
.ValidationException
is now abstract
.IReaderConfiguration.ReadingExceptionOccurred
type changed from Action<CsvHelperException>
to Func<CsvHelperException, bool>
.Configuration.ReadingExceptionOccurred
type changed from Action<CsvHelperException>
to Func<CsvHelperException, bool>
.NamedIndexCache
type from Tuple<string, int>
to (string, int)
. This adds a dependency to System.ValueTuple
on .NET 4.5.bool IParserConfiguration.LineBreakInQuotedFieldIsBadData
.bool Configuration.LineBreakInQuotedFieldIsBadData
.IReaderConfiguration.PrepareHeaderForMatch
type from Func<string, string>
to Func<string, int, string>
.Configuration.PrepareHeaderForMatch
type from Func<string, string>
to Func<string, int, string>
.char ISerializerConfiguration.Escape
.char IParserConfiguration.Escape
.char Configuration.Escape
.WriteAsync
wasn't calling SanitizeForInjection
.leaveOpen
parameter in CsvParser
constructor was hard coded.This release contains changes from 8.3.0 and 8.2.0.
+This has been unlisted in nuget because of a breaking change before it. The changes are in 9.0.0.
+This has been unlisted in nuget because of a breaking change. The changes are in 9.0.0.
+OptionalAttribute
.IHasMapOptions : IHasOptional
.MemberMapBuilder : IHasOptional
.MemberMapBuilder : IHasOptionalOptions
.ConfigurationFunctions
.IgnoreBlankLines
wasn't being checked in GetField<T>(int index, ITypeConverter converter)
.IsOptional
mapping option.IsOptional
mapping option.GetField
was calling the ObjectResolver
.ObjectResolver
fallback causing a StackOverflowException
.IReaderConfiguration.IgnoreReferences
.IWriterConfiguration.IgnoreReferences
.CsvWriter
that allows for leaveOpen
to be set.CsvWriter.Dispose able to be called multiple times.ConfigureAwait(false)
to all async calls.ObjectResolver
.IReadingContext
and IWritingContext
interfaces. ReadingContext
and WritingContext
are used directly now.Configuration.PrepareHeaderForMatch
on header name to get property name for dynamic object.IWritingContext.TypeActions
signature changed.ObjectResolver
to create internal classes RecordManager
, ExpressionManager
, RecordCreatorFactory
, and RecordHydrator
, RecordWriterFactory
.IObjectResolver.Resolve<T>( params object[] constructorArgs )
method.IWriter.WriteRecords<T>( IEnumerable<T> records )
method.TypeConverterException
constructors signatures changed.Map<TClass>.References( expression )
back in.DefaultTypeConverterException
message. The generated message wasn't being used.ReadingExceptionOccurred
callback to GetRecord
methods when an exception occurs.ISerializerConfiguration.Quote
.ClassMap<TClass>.References( expression, constructorArs )
. Use sub property mapping instead.ClassMap<TClass>.ConstructUsing( expression ). Use the
ObjectResolver` instead.Configuration
/IReaderConfiguration
/IWriterConfiguration
bool PrefixReferenceHeaders
to Func<Type, string, string> ReferenceHeaderPrefix
. The function takes in the member type and member name and returns the prefix.MemberReferenceMap.Prefix()
method.ParameterReferenceMap.Prefix()
method.Configuration
/IReaderConfiguration
/IWriterConfiguration
ClassMap AutoMap<T>()
to ClassMap<T> AutoMap<T>()
TypeConverterException
constructors parameter from ReadingContext
to IReadingContext
.DefaultTypeConverter.ConvertFromString
not convertible exception.PrepareHeaderForMatch
calls.IEnumerable
.CsvWriter
on Dispose
.ShouldSkipRecord
not called if the parser returns null
for an end of stream.ShouldUseConstructorParameters
returns false
if there are no constructors.ConvertUsing
or Constant
are used.CsvParser
.CsvSerializer
.ConvertUsing
implementation for writing.IEnumerable
properties.ClassMapBuilder
to build maps on the fly without a mapping class.IDynamicMetaObjectProvider
objects. DynamicObject
and ExpandoObject
are the 2 most common.null
fields to be written.IDictionary
type converters.leaveOpen
flag to constructors to not dispose of underlying TextReader
and TextWriter
.CsvHelperException
and removed the string data.ConvertUsing
by caching the named indexes.null
when reading.IsHeaderCaseSensitive
, IgnoreHeaderWhiteSpace
, and TrimHeaders
and added a config for PrepareHeaderForMatch
that is a function. Both the header field name and the property name are ran through this method before matching against each other.ConstructUsing
works with reference maps.ConstructUsing
can use initializers.struct
.GetField
.WriteField
.string
formats for TypeConverterOptions
.ReIndex
and GetMaxIndex
on CsvClassMap
public
.Flush
method to the writer so NextRecord
just writes a line ending. This will allow users to not write a line ending if they want.SerializableAttribute
to exception classes. It was removed previously because of netstandard1.x not having it available.ByteArrayConverter
.Property
naming to Member
since both properties and fields are used.TypeConverterFactory
is now instance of Configuration
instead of a static.Configuration
flags to callbacks with default functionality to let the user change the functionality if they want.CsvClassMapCollection[type]
choosing the wrong type when multiple types on the inheritance tree are mapped.Configuration.ShouldSkipRecord
method always overrides the Configuration.SkipEmptyRecords
setting.ConvertToString
to get the method for ITypeConverter
instead of the actual converter. This is so the overridden implementation will be used instead of a random method with the same name.ReflectionHelper.CreateInstance
for the static delegate cache.IsFieldBad
by marking unquoted fields with quote chars as bad only when Configuration.IgnoreQuotes
is false
.Constant
not working with null
.object ICsvReader.GetField( int index, ITypeConverter converter )
object ICsvReader.GetField( string name, ITypeConverter converter )
object ICsvReader.GetField( string name, int index, ITypeConverter converter )
void ICsvWriter.WriteField( Type type, object field )
void ICsvWriter.WriteField( Type type, object field, ITypeConverter converter )
void ICsvWriter.WriteRecord( Type type, object record )
ICsvReaderRow
and into ICsvReader
.IEnumerable<T> GetRecords<T>()
IEnumerable<object> GetRecords( Type type )
void ClearRecordCache<T>()
void ClearRecordCache( Type type )
void ClearRecordCache()
CanConvertTo
and CanConvertFrom
from the type converters because there is no need for them.CsvHelperException
and removed the string data.WriteRecord
to not call NextRecord
.IgnorePrivateAccessor
to IncludePrivateProperties
to be more clear on intention.CsvHelperException
.IsHeaderCaseSensitive
, IgnoreHeaderWhiteSpace
, and TrimHeaders
and added PrepareHeaderForMatch
.DateTime
and DateTimeOffset
converters to not work when the string
is spaces to match what all the other converters do. The .NET Framework DateTime
and DateTimeOffset
converters will convert a string
of all spaces into MinValue
, so we are diverging from that a little.ReadHeader
to not set CurrentRecord
to null
.BadDataCallback
to take in a ReadingContext
instead of a string
.Csv
prefix from all classes except CsvReader
, CsvParser
, CsvWriter
, and CsvSerializer
.null
values since there is no common standard that could be found.boolean
values of yes
, y
, no
, n
since it's not a standard boolean. true
, false
, 1
, 0
still work.,
instead of ListSeparator.Flush
method to the writer.Property
naming to Member
.Configuration
s ThrowOnBadData
, IgnoreReadingExceptions
, SkipEmptyRecords
, and WillThrowOnMissingField
in favor of function callbacks.TypeConverterFactory
to TypeConverterCache
TypeConverterOptionsFactory
to TypeConverterOptionsCache
Configuration.HeaderValidatedCallback
to Configuration.HeaderValidated
Configuration.MissingFieldFoundCallback
to Configuration.MissingFieldFound
Configuration.ReadingExceptionCallback
to Configuration.ReadingExceptionOccurred
Configuration.BadDataFoundCallback
to Configuration.BadDataFound
ICsvParser
to IParser
FieldReader
to CsvFieldReader
ICsvReader
to IReader
ICsvReaderRow
to IReaderRow
ICsvSerializer
to ISerializer
ICsvWriter
to IWriter
ICsvWriterRow
to IWriterRow
CsvClassMapCollection[type]
choosing the wrong type when multiple types on the inheritance tree are mapped.TypeInfo
compatibility stuff internal to not cause conflicts.CsvReader.ReadHeader
so headers can be read without reading the first row.SerializableAttribute
to all exceptions.ShouldSkipRecord
not working on rows before header.TryGetField
with named index returning wrong value.DateTimeConverter
to the list of default converters.UseNewObjectForNullReferenceProperties
is off.Prefix( string prefix = null)
="0001"
TrimFields
to CsvWriter.Names
property on CsvPropertyNameCollection
to get raw list of property names.CsvClassMap<T>
are now public to more easily allow mapping during runtime.DateTimeOffset
converter.UseExcelLeadingZerosFormatForNumerics = true
.Most of the configuration done via class maps can also be done using attributes.
+Identifier,name,IsBool,Constant
+1,one,yes,a
+2,two,no,b
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+[Delimiter(",")]
+[CultureInfo("")] // Set CultureInfo to InvariantCulture
+public class Foo
+{
+ [Name("Identifier")]
+ public int Id { get; set; }
+
+ [Index(1)]
+ public string Name { get; set; }
+
+ [BooleanTrueValues("yes")]
+ [BooleanFalseValues("no")]
+ public bool IsBool { get; set; }
+
+ [Constant("bar")]
+ public string Constant { get; set; }
+
+ [Optional]
+ public string Optional { get; set; }
+
+ [Ignore]
+ public string Ignored { get; set; }
+}
+
+
+
If you don't supply a map to the configuration, one is automatically created for you on the fly. You can also call auto mapping directly in your class map. You may want to do this if you have a large number of properties that will be set up correctly by default, and only need to make a couple of changes.
+Id,The Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ AutoMap(CultureInfo.InvariantCulture);
+ Map(m => m.Name).Name("The Name");
+ }
+}
+
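If the header names already match the property names, you don't need to register a class map at all; the auto-generated map will be used. A minimal sketch, assuming a file whose header row is Id,Name:
void Main()
+{
+    using (var reader = new StreamReader("path\\to\\file.csv"))
+    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+    {
+        // No RegisterClassMap call; a map for Foo is generated on the fly.
+        var records = csv.GetRecords<Foo>().ToList();
+    }
+}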
+
+ You can set a constant value to a property instead of mapping it to a field.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public bool IsDirty { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ Map(m => m.IsDirty).Constant(true);
+ }
+}
+
+
+ When you use auto mapping in your class map, every property will get mapped. If there are properties that you don't want mapped, you can ignore them.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public bool IsDirty { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ AutoMap(CultureInfo.InvariantCulture);
+ Map(m => m.IsDirty).Ignore();
+ }
+}
+
+
+ Topics | ++ |
---|---|
Mapping Properties | +Mapping to properties. | +
Mapping by Name | +Mapping properties by header name. | +
Mapping by Alternate Names | +Mapping properties that may be one of many names. | +
Mapping Duplicate Names | +Mapping properties that have duplicate header names. | +
Mapping by Index | +Mapping properties by header index position. | +
Auto Mapping | +Automatic mapping. | +
Ignoring Properties | +Ignoring mapped properties. | +
Constant Value | +Setting a constant value for a property. | +
Type Conversion | +Using a specific type converter. | +
Inline Type Conversion | +Convert a field to a type inline. | +
Optional Maps | +Map a property only if it exists. | +
Validation | +Validate a field value. | +
If you don't want to write a full ITypeConverter
implementation, you can specify a function that will do the same thing.
Id,Name,Json
+1,one,"{ ""Foo"": ""Bar"" }"
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public Json Json { get; set; }
+}
+
+public class Json
+{
+ public string Foo { get; set; }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ Map(m => m.Json).Convert(row => JsonConvert.DeserializeObject<Json>(row.GetField("Json")));
+ }
+}
+
+void Main()
+{
+ var records = new List<Foo>
+ {
+ new Foo { Id = 1, Name = "one" }
+ };
+
+ using (var writer = new StreamWriter("path\\to\\file.csv"))
+ using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.WriteRecords(records);
+
+ writer.ToString().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public Json Json { get; set; }
+}
+
+public class Json
+{
+ public string Foo { get; set; }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ Map(m => m.Json).Convert(o => JsonConvert.SerializeObject(o));
+ }
+}
+
+Id,Name,Json
+1,one,"{""Id"":1,""Name"":""one"",""Json"":null}"
+
+
+ If you have a header name that could vary, you can specify multiple header names.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+    public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Name("TheId", "Id");
+ Map(m => m.Name).Name("TheName", "Name");
+ }
+}
+
+
+ If your data doesn't have a header you can map by index instead of name. You can't rely on the order of class properties in .NET, so if you're not mapping by name, make sure you specify an index.
+1,one
+
+void Main()
+{
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ HasHeaderRecord = false,
+ };
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, config))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+    public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Index(0);
+ Map(m => m.Name).Index(1);
+ }
+}
+
+
If your CSV header names don't match your class property names, you can map the property to the column by name.
+ColumnA,ColumnB
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+    public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Name("ColumnA");
+ Map(m => m.Name).Name("ColumnB");
+ }
+}
+
+
Sometimes you have duplicate header names. This is handled through a header name index. The name index is the zero-based occurrence of that header name, not the column position of the header.
+Id,Name,Name
+1,first,last
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+    public string FirstName { get; set; }
+ public string LastName { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.FirstName).Name("Name").NameIndex(0);
+ Map(m => m.LastName).Name("Name").NameIndex(1);
+ }
+}
+
+
+ This will map the properties of a class to the header names of the CSV data. The mapping needs to be registered in the context. This example is identical to not using a class mapping at all. The headers match the property names.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ }
+}
+
+
+ If you have data that may or may not have a header, you can make the mapping optional.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public DateTimeOffset? Date { get; set; }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ Map(m => m.Date).Optional();
+ }
+}
+
+
+ If you need to convert to or from a non-standard .NET type, you can supply a type converter to use for a property.
+Id,Name,Json
+1,one,"{ ""Foo"": ""Bar"" }"
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public Json Json { get; set; }
+}
+
+public class Json
+{
+ public string Foo { get; set; }
+}
+
+public class JsonConverter<T> : DefaultTypeConverter
+{
+ public override object ConvertFromString(string text, IReaderRow row, MemberMapData memberMapData)
+ {
+ return JsonConvert.DeserializeObject<T>(text);
+ }
+
+ public override string ConvertToString(object value, IWriterRow row, MemberMapData memberMapData)
+ {
+ return JsonConvert.SerializeObject(value);
+ }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ Map(m => m.Json).TypeConverter<JsonConverter<Json>>();
+ }
+}
+
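If you would rather not go through a class map for this, you can register the converter for the Json type once on the reader's context instead. A sketch, assuming the AddConverter registration method on the context's TypeConverterCache:
void Main()
+{
+    using (var reader = new StreamReader("path\\to\\file.csv"))
+    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+    {
+        // Every member of type Json now uses JsonConverter<Json>, no class map needed.
+        csv.Context.TypeConverterCache.AddConverter<Json>(new JsonConverter<Json>());
+        csv.GetRecords<Foo>().ToList().Dump();
+    }
+}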
+
+ If you want to ensure your data conforms to some sort of standard, you can validate it.
+Id,Name
+1,on-e
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ public DateTimeOffset? Date { get; set; }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name).Validate(field => !field.Contains("-"));
+ }
+}
+
+
+ + | + |
---|---|
Class Maps | +Configure CSV structure with a class map. | +
Attributes | +Configure CSV structure with attributes. | +
The question on how to load a data table using CsvHelper came up so often that I just built the functionality in.
+CsvDataReader
implements IDataReader
. This means it has all the capabilities of a forward only data reader. There is really no reason to use this class directly over using CsvReader
. CsvDataReader
requires an instance of CsvReader
and uses it internally to do its work.
Loading a DataTable
in CsvHelper is simple. By default, a table will be loaded with all columns populated as strings. For the reader to be ready after instantiation, the first row needs to be read immediately, so you need to make any configuration changes before creating an instance of CsvDataReader.
using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+ // Do any configuration to `CsvReader` before creating CsvDataReader.
+ using (var dr = new CsvDataReader(csv))
+ {
+ var dt = new DataTable();
+ dt.Load(dr);
+ }
+}
+
+If you want to specify columns and column types, the data table will be loaded with the types automatically converted.
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+ // Do any configuration to `CsvReader` before creating CsvDataReader.
+ using (var dr = new CsvDataReader(csv))
+ {
+ var dt = new DataTable();
+ dt.Columns.Add("Id", typeof(int));
+ dt.Columns.Add("Name", typeof(string));
+
+ dt.Load(dr);
+ }
+}
+
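Because CsvDataReader implements IDataReader, you can also consume it directly as a forward-only data reader without loading a DataTable. A minimal sketch, assuming the same two columns as above:
using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+using (var dr = new CsvDataReader(csv))
+{
+    while (dr.Read())
+    {
+        // Fields are read by ordinal, like any other IDataReader.
+        var id = dr.GetInt32(0);
+        var name = dr.GetString(1);
+    }
+}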
+
+ Sections | ++ |
---|---|
Prerequisites | +Implied knowledge when using CsvHelper. | +
Reading | +Reading CSV data. | +
Writing | +Writing CSV data. | +
Configuration | +Configuring the behavior of CsvHelper to work with your CSV data or custom class structures. | +
Type Conversion | +Using type conversion to convert CSV fields to and from .NET types. | +
DataReader | +Using a DataTable to read CSV data. | +
Here are some prerequisites that are needed for using CsvHelper. These are .NET basics that are implied knowledge when using CsvHelper. Microsoft has excellent documentation that you can use to learn more.
+Topics | ++ |
---|---|
Using and Dispose | ++ |
Reading and Writing Files | ++ |
Streams | ++ |
To open a file for reading or writing, we can use System.IO.File
.
using (var stream = File.OpenRead("path\\to\\file.csv"))
+{
+}
+
+using (var stream = File.OpenWrite("path\\to\\file.csv"))
+{
+}
+
+These both return a FileStream
for working with our file. Since our data is text, we will need to use a StreamReader
and StreamWriter
to read and write the text.
using (var stream = File.OpenRead("path\\to\\file.csv"))
+using (var reader = new StreamReader(stream))
+{
+}
+
+using (var stream = File.OpenWrite("path\\to\\file.csv"))
+using (var writer = new StreamWriter(stream))
+{
+}
+
+StreamReader
and StreamWriter
have shortcuts for doing this.
using (var reader = new StreamReader("path\\to\\file.csv"))
+{
+}
+
+using (var writer = new StreamWriter("path\\to\\file.csv"))
+{
+}
+
+CsvHelper doesn't know anything about your encoding, so if you have a specific encoding, you'll need to specify that in your stream.
+using (var reader = new StreamReader("path\\to\\file.csv", Encoding.UTF8))
+{
+}
+
+using (var writer = new StreamWriter("path\\to\\file.csv", Encoding.UTF8))
+{
+}
+
+CsvReader
and CsvWriter
take a TextReader
and TextWriter
in their constructors. TextReader
and TextWriter
are abstract
classes for reading and writing text. StreamReader
inherits TextReader
and StreamWriter
inherits TextWriter
, so we can use those with CsvReader
and CsvWriter
.
using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+}
+
+using (var writer = new StreamWriter("path\\to\\file.csv"))
+using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+{
+}
+
+
+ When reading from a stream, if you need to go back to the beginning of the stream, you can use the Stream.Position
property.
using (var stream = File.OpenRead("path\\to\\file"))
+using (var reader = new StreamReader(stream))
+{
+ // Read file content.
+ var content = reader.ReadToEnd();
+
+ // Go back to beginning of the stream.
+ stream.Position = 0;
+
+ // Read file content again.
+ content = reader.ReadToEnd();
+}
+
+When writing to a file, you need to flush the writer for the data to be written to the stream. StreamWriter
contains an internal buffer and the data is only written to the stream when the buffer is full, or Flush
is called. Flush
is automatically called when a using
block exits.
using (var stream = File.OpenWrite("path\\to\\file"))
+using (var writer = new StreamWriter(stream))
+{
+ writer.WriteLine("Foo");
+ writer.Flush(); // Data is written from the writer buffer to the stream.
+} // Flush is also called here.
+
+
+ Whenever you have an object that implements IDisposable
, you need to dispose of the resource when you're done with it. Most classes that use unmanaged resources will implement IDisposable
. This means a lot of classes in the System.IO
namespace will need to be disposed of.
The best practice to dispose of an object when you're done with it is to wrap the code in a using
block. When the using
block exits, the resource will automatically be disposed of as soon as possible.
using (var stream = new MemoryStream())
+{
+ // Use the stream.
+}
+// The stream will be disposed of as soon as possible.
+
+If you need to keep it around for a while and dispose of it later, using
does some error handling for you, so it's still a good idea to use it instead of calling Dispose
directly. There is some debate on whether this is a good idea because it doesn't show intent.
var stream = new MemoryStream();
+// Later in a different part of your code.
+using (stream) { }
+
+
+ Convert CSV rows into a class object that is re-used on every iteration of the enumerable. Each enumeration will hydrate the given record, but only the mapped members. If you supplied a map and didn't map one of the members, that member will not get hydrated with the current row's data. Be careful: if you call anything on the projection that forces evaluation of the IEnumerable, such as ToList(), you will get a list where every element is the same instance you provided, hydrated with the last record in the CSV file.
Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ var record = new Foo();
+ var records = csv.EnumerateRecords(record);
+ foreach (var r in records)
+ {
+ // r is the same instance as record.
+ }
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+
+ Convert CSV rows into anonymous type objects. You just need to supply the anonymous type definition.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ var anonymousTypeDefinition = new
+ {
+ Id = default(int),
+ Name = string.Empty
+ };
+ var records = csv.GetRecords(anonymousTypeDefinition);
+ }
+}
+
+
+ Convert CSV rows into class objects.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ var records = csv.GetRecords<Foo>();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+
+ Convert CSV rows into dynamic
objects. Since there is no way to tell what type the properties should be, all the properties on the dynamic object are strings.
Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ var records = csv.GetRecords<dynamic>();
+ }
+}
+
+
Topics | |
---|---|
Get Class Records | |
Get Dynamic Records | |
Get Anonymous Type Records | |
Enumerate Class Records | |
Reading by Hand | |
Reading Multiple Data Sets | |
Reading Multiple Record Types | |
Sometimes it's easier to not try and configure a mapping to match your class definition for various reasons. It's usually only a few more lines of code to just read the rows by hand instead.
+Id,Name
+1,one
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ var records = new List<Foo>();
+ csv.Read();
+ csv.ReadHeader();
+ while (csv.Read())
+ {
+ var record = new Foo
+ {
+ Id = csv.GetField<int>("Id"),
+ Name = csv.GetField("Name")
+ };
+ records.Add(record);
+ }
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+
+ For some reason there are CSV files out there that contain multiple sets of CSV data in them. You should be able to read files like this without issue. You will need to detect when to change the class types you are retrieving.
+FooId,Name
+1,foo
+
+BarId,Name
+07a0fca2-1b1c-4e44-b1be-c2b05da5afc7,bar
+
+void Main()
+{
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ IgnoreBlankLines = false,
+ };
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, config))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.Context.RegisterClassMap<BarMap>();
+ var fooRecords = new List<Foo>();
+ var barRecords = new List<Bar>();
+ var isHeader = true;
+ while (csv.Read())
+ {
+ if (isHeader)
+ {
+ csv.ReadHeader();
+ isHeader = false;
+ continue;
+ }
+
+ if (string.IsNullOrEmpty(csv.GetField(0)))
+ {
+ isHeader = true;
+ continue;
+ }
+
+ switch (csv.HeaderRecord[0])
+ {
+ case "FooId":
+ fooRecords.Add(csv.GetRecord<Foo>());
+ break;
+ case "BarId":
+ barRecords.Add(csv.GetRecord<Bar>());
+ break;
+ default:
+ throw new InvalidOperationException("Unknown record type.");
+ }
+ }
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+public class Bar
+{
+ public Guid Id { get; set; }
+ public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Name("FooId");
+ Map(m => m.Name);
+ }
+}
+
+public sealed class BarMap : ClassMap<Bar>
+{
+ public BarMap()
+ {
+ Map(m => m.Id).Name("BarId");
+ Map(m => m.Name);
+ }
+}
+
+
+ If you have CSV data where each row may be a different record type, you should be able to read based on a row type or something similar.
+A,1,foo
+B,07a0fca2-1b1c-4e44-b1be-c2b05da5afc7,bar
+
+void Main()
+{
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ HasHeaderRecord = false,
+ };
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, config))
+ {
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.Context.RegisterClassMap<BarMap>();
+ var fooRecords = new List<Foo>();
+ var barRecords = new List<Bar>();
+ while (csv.Read())
+ {
+ switch (csv.GetField(0))
+ {
+ case "A":
+ fooRecords.Add(csv.GetRecord<Foo>());
+ break;
+ case "B":
+ barRecords.Add(csv.GetRecord<Bar>());
+ break;
+ default:
+ throw new InvalidOperationException("Unknown record type.");
+ }
+ }
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+public class Bar
+{
+ public Guid Id { get; set; }
+ public string Name { get; set; }
+}
+
+public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Index(1);
+ Map(m => m.Name).Index(2);
+ }
+}
+
+public sealed class BarMap : ClassMap<Bar>
+{
+ public BarMap()
+ {
+ Map(m => m.Id).Index(1);
+ Map(m => m.Name).Index(2);
+ }
+}
+
+
+ The built-in type converters will handle most situations for you, but if you find a situation where they don't, you can create your own type converter.
+You can register the converter globally or per member via an attribute or class map. +You only need to use one, but all are shown in the example.
+Id,Name,Json
+1,one,"{""foo"": ""bar""}"
+
+void Main()
+{
+ using (var reader = new StreamReader("path\\to\\file.csv"))
+ using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+ {
+ // Register globally.
+ csv.Context.TypeConverterCache.AddConverter<JsonNode>(new JsonNodeConverter());
+ csv.Context.RegisterClassMap<FooMap>();
+ csv.GetRecords<Foo>().ToList().Dump();
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+ // Register via attribute.
+ [TypeConverter(typeof(JsonNodeConverter))]
+ public JsonNode Json { get; set; }
+}
+
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id);
+ Map(m => m.Name);
+ // Register via map.
+ Map(m => m.Json).TypeConverter<JsonNodeConverter>();
+ }
+}
+
+public class JsonNodeConverter : DefaultTypeConverter
+{
+ public override object ConvertFromString(string text, IReaderRow row, MemberMapData memberMapData)
+ {
+ return JsonSerializer.Deserialize<JsonNode>(text);
+ }
+}
+
+
+ When reading and writing, a custom class is converted to and from a CSV row. Each CSV field can be converted to and from a class property. This conversion is done via type converters.
There are many built-in converters already available to you.
CsvHelper Converter | C# type keyword | .NET Type |
---|---|---|
ArrayConverter | [ ] | System.Array |
BigIntegerConverter | | System.Numerics.BigInteger |
BooleanConverter | bool | System.Boolean |
ByteArrayConverter | byte[ ] | System.Array |
ByteConverter | byte | System.Byte |
CharConverter | char | System.Char |
CollectionGenericConverter | | System.Collections.Generic.Collection<T>, System.Collections.Generic.List<T> |
DateOnlyConverter | | System.DateOnly |
DateTimeConverter | | System.DateTime |
DateTimeOffsetConverter | | System.DateTimeOffset |
DecimalConverter | decimal | System.Decimal |
DoubleConverter | double | System.Double |
EnumConverter | enum | System.Enum |
GuidConverter | | System.Guid |
IDictionaryConverter | | System.Collections.Generic.Dictionary<string, string> |
IDictionaryGenericConverter | | System.Collections.Generic.Dictionary<TKey, TValue> |
IEnumerableConverter | | System.Collections.ICollection, System.Collections.IEnumerable, System.Collections.IList |
IEnumerableGenericConverter | | System.Collections.Generic.ICollection<T>, System.Collections.Generic.IEnumerable<T>, System.Collections.Generic.IList<T> |
Int16Converter | short | System.Int16 |
Int32Converter | int | System.Int32 |
Int64Converter | long | System.Int64 |
NullableConverter | | System.Nullable<T> |
SByteConverter | sbyte | System.SByte |
SingleConverter | float | System.Single |
StringConverter | string | System.String |
TimeOnlyConverter | | System.TimeOnly |
UInt16Converter | ushort | System.UInt16 |
UInt32Converter | uint | System.UInt32 |
UInt64Converter | ulong | System.UInt64 |
UriConverter | | System.Uri |
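For example, a minimal sketch (the Product class and file layout here are made up for illustration): when the class members are typed, the matching built-in converters parse the fields during GetRecords.

Id,Price,CreatedOn
1,9.99,2023-05-12

public class Product
{
    public int Id { get; set; }             // handled by Int32Converter
    public decimal Price { get; set; }      // handled by DecimalConverter
    public DateTime CreatedOn { get; set; } // handled by DateTimeConverter
}

void Main()
{
    using (var reader = new StreamReader("path\\to\\file.csv"))
    using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
    {
        var products = csv.GetRecords<Product>().ToList();
    }
}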
Options can be passed to the type converters.
+Most type converters use IFormattable.ToString
to write and TryParse
to read.
+Any option for these methods should be available through configuration.
public sealed class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.DateTimeProp).TypeConverterOption.DateTimeStyles(DateTimeStyles.AllowInnerWhite | DateTimeStyles.RoundtripKind);
+ }
+}
+
+public class Foo
+{
+ [DateTimeStyles(DateTimeStyles.AllowInnerWhite | DateTimeStyles.RoundtripKind)]
+ public DateTime DateTimeProp { get; set; }
+}
+
+
+ void Main()
+{
+ var records = new List<Foo>
+ {
+ new Foo { Id = 1, Name = "one" },
+ };
+
+ // Write to a file.
+ using (var writer = new StreamWriter("path\\to\\file.csv"))
+ using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+ {
+ csv.WriteRecords(records);
+ }
+
+ records = new List<Foo>
+ {
+ new Foo { Id = 2, Name = "two" },
+ };
+
+ // Append to the file.
+ var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+ {
+ // Don't write the header again.
+ HasHeaderRecord = false,
+ };
+ using (var stream = File.Open("path\\to\\file.csv", FileMode.Append))
+ using (var writer = new StreamWriter(stream))
+ using (var csv = new CsvWriter(writer, config))
+ {
+ csv.WriteRecords(records);
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+Id,Name
+1,one
+2,two
+
+
+ When opening a CSV in an external program, a formula in a field could be run that contains a vulnerability.
+Read more here: CSV Injection.
+Due to this issue, there is a setting InjectionOptions
that can be configured.
The list of injection characters to detect are configurable in CsvConfiguration.InjectionCharacters
+and default to =
, @
, +
, -
, \t
, \r
. An injection character can be the first character of a field
+or quoted field. i.e. =foo
or "=foo"
The InjectionOptions
values are None
(default), Escape
, Strip
, and Exception
.
None: no injection protection is taken.
Exception: if an injection character is detected, a CsvWriterException is thrown.
Strip: all injection characters at the start of a field will be removed. ===foo will be stripped to foo.
Escape: if an injection character is detected, the field will be prepended with the InjectionEscapeCharacter, which defaults to '. The field will be quoted if it is not already.
=one -> "'=one"
"=one" -> "'=one"
=one"two -> "'=one""two"
This option is disabled by default because the primary goal of this library is to read and write CSV
+files. If you are storing user-entered data that you haven't sanitized yourself and you're letting
+it be accessed by people that may open it in Excel/Sheets/etc., you might consider enabling this feature.
+The InjectionEscapeCharacter
is not removed when reading.
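As a sketch of turning this on, using the InjectionOptions setting described above (the records variable is assumed to already exist):

var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
    // Escape fields that start with an injection character when writing.
    InjectionOptions = InjectionOptions.Escape,
};
using (var writer = new StreamWriter("path\\to\\file.csv"))
using (var csv = new CsvWriter(writer, config))
{
    csv.WriteRecords(records);
}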
When writing, you can pass in an enumerable of class objects, dynamic objects, anonymous type objects, or pretty much anything else, and it will get written.
Topics | |
---|---|
Write Class Objects | |
Write Dynamic Objects | |
Write Anonymous Type Objects | |
Appending to an Existing File | |
void Main()
+{
+ var records = new List<object>
+ {
+ new { Id = 1, Name = "one" },
+ };
+
+ using (var writer = new StreamWriter("path\\to\\file.csv"))
+ using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+ {
+ csv.WriteRecords(records);
+ }
+}
+
+Id,Name
+1,one
+
+
+ void Main()
+{
+ var records = new List<Foo>
+ {
+ new Foo { Id = 1, Name = "one" },
+ };
+
+ using (var writer = new StreamWriter("path\\to\\file.csv"))
+ using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+ {
+ csv.WriteRecords(records);
+ }
+}
+
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+Id,Name
+1,one
+
+
+ void Main()
+{
+ var records = new List<dynamic>();
+
+ dynamic record = new ExpandoObject();
+ record.Id = 1;
+ record.Name = "one";
+ records.Add(record);
+
+ using (var writer = new StringWriter())
+ using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+ {
+ csv.WriteRecords(records);
+
+ writer.ToString().Dump();
+ }
+}
+
+Id,Name
+1,one
+
+
+ PM> Install-Package CsvHelper
+
+> dotnet add package CsvHelper
+
+There is some basic .NET knowledge that is implied when using this documentation. Please look over the prerequisites to make sure you have an understanding of them. Prerequisites
+CsvHelper requires you to specify the CultureInfo
that you want to use. The culture is used to determine the default delimiter, default line ending, and formatting when type converting. You can change the configuration of any of these too if you like. Choose the appropriate culture for your data. InvariantCulture
will be the most portable for writing a file and reading it back again, so that will be used in most of the examples.
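As a sketch (the de-DE file below is hypothetical), passing a culture that matches the data lets the default delimiter and the numeric formats line up:

// Hypothetical data written with a German culture: semicolon delimiter, comma decimal separator.
// Id;Price
// 1;9,99
using (var reader = new StreamReader("path\\to\\file.csv"))
using (var csv = new CsvReader(reader, CultureInfo.GetCultureInfo("de-DE")))
{
    var records = csv.GetRecords<dynamic>();
}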
By default, CsvHelper will follow RFC 4180 and use \r\n
for writing newlines no matter what operating system
+you are running on. CsvHelper can read \r\n
, \r
, or \n
without any configuration changes. If you want to read or write in a non-standard format, you can
+change the configuration for NewLine
.
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ NewLine = Environment.NewLine,
+};
+
+Let's say we have a CSV file that looks like this.
+Id,Name
+1,one
+2,two
+
+And a class definition that looks like this.
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+If our class property names match our CSV file header names, we can read the file without any configuration.
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+ var records = csv.GetRecords<Foo>();
+}
+
+The GetRecords<T>
method will return an IEnumerable<T>
that will yield
records.
+What this means is that only a single record is returned at a time as you iterate the records.
+That also means that only a small portion of the file is read into memory. Be careful though.
+If you do anything that executes a LINQ projection, such as calling .ToList()
, the entire file
+will be read into memory. CsvReader
is forward only, so if you want to run any LINQ queries
+against your data, you'll have to pull the whole file into memory. Just know that is what you're doing.
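To make that concrete, here is a small sketch using the Foo class from above: the foreach streams one record at a time, while ToList() pulls the whole file into memory at once.

using (var reader = new StreamReader("path\\to\\file.csv"))
using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
{
    // Streams: only one record is hydrated at a time.
    foreach (var record in csv.GetRecords<Foo>())
    {
        // Work with the record while the reader is still open.
    }
}

using (var reader = new StreamReader("path\\to\\file.csv"))
using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
{
    // Materializes: the entire file is read into the list.
    var all = csv.GetRecords<Foo>().ToList();
}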
Let's say our CSV file names are a little different than our class properties and we don't want to +make our properties match.
+id,name
+1,one
+2,two
+
+In this case, the names are lower case. We want our property names to be Pascal Case, so we can +just change how our properties match against the header names.
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ PrepareHeaderForMatch = args => args.Header.ToLower(),
+};
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, config))
+{
+ var records = csv.GetRecords<Foo>();
+}
+
+Using the configuration PrepareHeaderForMatch
, we're able to change how the header matching
+is done against the property name. Both the header and the property name are run through the
+PrepareHeaderForMatch
function. When the reader needs to find the property to set for the
+header, they will now match. You can use this function to do other things such as remove
+whitespace or other characters.
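For example, a sketch that also strips spaces so a hypothetical header like "First Name" would match a property named FirstName:

var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
    // Lower-case and remove spaces before matching headers to members.
    PrepareHeaderForMatch = args => args.Header.ToLower().Replace(" ", string.Empty),
};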
Let's say our CSV file doesn't have a header at all.
+1,one
+2,two
+
+First we need to tell the reader that there is no header record, using configuration.
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ HasHeaderRecord = false,
+};
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, config))
+{
+ var records = csv.GetRecords<Foo>();
+}
+
+CsvReader will use the position of the properties in the class as the index position. There is an +issue with this though. You can't rely on the ordering of class members in .NET. +We can solve this by mapping the property to a position in the CSV file.
+One way to do this is with attribute mapping.
+public class Foo
+{
+ [Index(0)]
+ public int Id { get; set; }
+
+ [Index(1)]
+ public string Name { get; set; }
+}
+
+The IndexAttribute
allows you to specify which position the CSV field is that you want to use
+for the property.
You can also map by name. Let's use our lower case header example from before and see how we can +use attributes instead of changing the header matching.
+public class Foo
+{
+ [Name("id")]
+ public int Id { get; set; }
+
+ [Name("name")]
+ public string Name { get; set; }
+}
+
+There are many other attributes you can use also.
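For instance, a sketch using a couple of them, Optional and Ignore (the Internal member is made up for illustration):

public class Foo
{
    [Name("id")]
    public int Id { get; set; }

    // Don't fail if this column is missing from the file.
    [Optional]
    [Name("name")]
    public string Name { get; set; }

    // Never read or write this member.
    [Ignore]
    public string Internal { get; set; }
}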
+What if we don't have control over the class we want to map to so we can't add attributes to it?
+In this case, we can use a fluent ClassMap
to do the mapping.
public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Name("id");
+ Map(m => m.Name).Name("name");
+ }
+}
+
+To use the mapping, we need to register it in the context.
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+ csv.Context.RegisterClassMap<FooMap>();
+ var records = csv.GetRecords<Foo>();
+}
+
+Creating a class map is the recommended way of mapping files in CsvHelper because it's a +lot more powerful.
+You can also read rows by hand.
+using (var reader = new StreamReader("path\\to\\file.csv"))
+using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
+{
+ csv.Read();
+ csv.ReadHeader();
+ while (csv.Read())
+ {
+ var record = csv.GetRecord<Foo>();
+ // Do something with the record.
+ }
+}
+
+Read
will advance the row. ReadHeader
will read the row into CsvHelper as the header values.
+Separating Read
and ReadHeader
allows you to do other things with the header row before
+moving onto the next row. GetRecord
also does not advance the reader to allow you to do
+other things with the row you might need to do. You may need to GetField
for a single field
+or maybe call GetRecord
multiple times to fill more than one object.
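A small sketch of those field-level calls, since GetField and GetRecord both leave the reader on the current row:

using (var reader = new StreamReader("path\\to\\file.csv"))
using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
{
    csv.Read();
    csv.ReadHeader();
    while (csv.Read())
    {
        // Pull single fields by name or by index from the current row.
        var id = csv.GetField<int>("Id");
        var name = csv.GetField(1);

        // The row hasn't advanced, so the full record can still be read.
        var record = csv.GetRecord<Foo>();
    }
}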
Now let's look at how we can write CSV files. It's basically the same thing, but in reverse order.
+Let's use the same class definition as before.
+public class Foo
+{
+ public int Id { get; set; }
+ public string Name { get; set; }
+}
+
+And we have a set of records like this.
+var records = new List<Foo>
+{
+ new Foo { Id = 1, Name = "one" },
+ new Foo { Id = 2, Name = "two" },
+};
+
+We can write the records to a file without any configuration.
+using (var writer = new StreamWriter("path\\to\\file.csv"))
+using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+{
+ csv.WriteRecords(records);
+}
+
+The WriteRecords
method will write all the records to the file. After you are done writing,
+you should call writer.Flush()
to ensure that all the data in the writer's internal buffer
+has been flushed to the file. Once a using
block has exited, the writer is automatically
+flushed, so we don't have to explicitly do it here. It's recommended to always wrap any
+IDisposable
object with using
blocks. The object will dispose of itself (and in our case
+flush too) as soon as possible after the using
block has exited.
Remember how we can't rely on property order in .NET? If we are writing a class that has a header, it doesn't matter, as long as we are reading using the headers later. If we want to position the headers in the CSV file, we need to specify an index to guarantee its order. It's recommended to always set an index when writing.
+public class FooMap : ClassMap<Foo>
+{
+ public FooMap()
+ {
+ Map(m => m.Id).Index(0).Name("id");
+ Map(m => m.Name).Index(1).Name("name");
+ }
+}
+
+You can also write rows by hand.
+using (var writer = new StreamWriter("path\\to\\file.csv"))
+using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
+{
+ csv.WriteHeader<Foo>();
+ csv.NextRecord();
+ foreach (var record in records)
+ {
+ csv.WriteRecord(record);
+ csv.NextRecord();
+ }
+}
+
+WriteHeader
will not advance you to the next row. Separating NextRecord
from WriteHeader
+allows you to write more things in the header if you need to. WriteRecord
also will not
+advance you to the next row to give you the ability to write multiple objects or use
+WriteField
to write individual fields.
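As a sketch, writing the header and each field by hand with WriteField, using the same records list as above:

using (var writer = new StreamWriter("path\\to\\file.csv"))
using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
{
    // Header written field by field.
    csv.WriteField("Id");
    csv.WriteField("Name");
    csv.NextRecord();

    foreach (var record in records)
    {
        csv.WriteField(record.Id);
        csv.WriteField(record.Name);
        csv.NextRecord();
    }
}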
Compiles classes on the fly for extremely fast performance.
+Conservative when writing, liberal when reading.
++ Reading and writing is as simple as GetRecords<T>() and WriteRecords(records). + No configuration required. +
+Feature rich mapping and attribute systems to configure any type of CSV file to any type of class.
+Adheres to the RFC 4180 standard to ensure compatibility across systems.
++ Mode for common Linux/SerDe files where an escape character is used instead of RFC 4180's field quoting. +
+When non-standard files are read, fallback will match MS Excel parsing.
+Reading records will yield results so only one record is in memory at a time.
++ Option to use field caching when there is repeated data in a CSV file. This will reduce memory + and speed up parsing time. +
++ CsvHelper is built on .NET Standard 2.0 which allows it to run + almost everywhere. + Older versions of .NET are possible if needed. +
++ Stack Overflow + has millions of users in its community just waiting to answer your questions. + There is only one of me and I'm pretty busy. + + + +
++ Learn how to use CsvHelper using the API reference + or check out some examples. +
++ If you have a feature request or have found a bug, you can + log an issue. + Please use + Stack Overflow + if you have a question. +
+Completely free for commercial use. Dual licensed. Choose which ever license suits your needs.
+Microsoft Public License (MS-PL)
+ + +Want to contribute? Great! Here are a few guidelines.
++ You can do a one time donation through + Paypal. +
++ You can do recurring donations through + Open Collective. +
++ If you want to do something a little more fun, you can pick something + from my Amazon wish list. This + obviously doesn't go directly towards building software, but does help + keep me sane when I'm not. +
+Migration steps for major version bumps.
+ + +ConvertUsing
was renamed to Convert
.
// v19
+Map(m => m.Property).ConvertUsing(row => row.GetField<int>(0) + row.GetField<int>(1));
+
+// v20
+Map(m => m.Property).Convert(row => row.GetField<int>(0) + row.GetField<int>(1));
+
+All properties changed from get; set;
to get; init;
.
// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture);
+config.Delimiter = ";";
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ Delimiter = ";",
+};
+
+CsvConfiguration
changed from a class
to a record
.
// v19
+class MyConfig : CsvConfiguration {}
+
+// v20
+record MyConfig : CsvConfiguration {}
+
+ShouldQuote
now takes in IWriterRow
instead of CsvContext
.
// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = (field, row) => true,
+};
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = (field, context) => true,
+};
+
+Changed from enum NewLines
to char?
.
// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ NewLine = NewLines.LF,
+};
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ NewLine = '\n',
+};
+
+This was removed. Any code referencing this should be deleted.
+This moved to CsvContext
.
// v19
+csv.Configuration.RegisterClassMap<MyMap>();
+
+// v20
+csv.Context.RegisterClassMap<MyMap>();
+
+This moved to CsvContext
.
// v19
+csv.Configuration.UnregisterClassMap<MyMap>();
+
+// v20
+csv.Context.UnregisterClassMap<MyMap>();
+
+This moved to CsvContext
.
// v19
+csv.Configuration.AutoMap<MyType>();
+
+// v20
+csv.Context.AutoMap<MyType>();
+
+All setters removed.
+// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture);
+config.Delimiter = ";";
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ Delimiter = ";",
+};
+
+bool CacheFields
.bool LeaveOpen
.char? NewLine
.ParserMode Mode
.char[] WhiteSpaceChars
.bool IgnoreQuotes
.Any classes that implement IParserConfiguration
will need these changes.
All setters removed.
+// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture);
+config.Delimiter = ";";
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ Delimiter = ";",
+};
+
+TypeConverterOptionsCache
.TypeConverterCache
.Maps
.RegisterClassMap
.UnregisterClassMap
.AutoMap
.Any classes that implement IReaderConfiguration
will need these changes.
This interface was removed and its properties were added to IWriterConfiguration
.
// v19
+class MyConfig : ISerializerConfiguration {}
+
+// v20
+class MyConfig : IWriterConfiguration {}
+
+All setters removed.
+// v19
+var config = new CsvConfiguration(CultureInfo.InvariantCulture);
+config.Delimiter = ";";
+
+// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ Delimiter = ";",
+};
+
+QuoteString
.TypeConverterCache
.MemberTypes
.Maps
.RegisterClassMap
.UnregisterClassMap
.AutoMap
.Any classes that implement IWriterConfiguration
will need these changes.
ConvertUsing
renamed to Convert
.
// v19
+Map(m => m.Property).ConvertUsing(row => row.Get(0));
+Map(m => m.Property).ConvertUsing(value => value?.ToString() ?? string.Empty);
+
+// v20
+Map(m => m.Property).Convert(row => row.Get(0));
+Map(m => m.Property).Convert(value => value?.ToString() ?? string.Empty);
+
+string[] Read()
changed to bool Read()
.
// v19
+string[] record;
+while ((record = parser.Read()) != null)
+{
+}
+
+// v20
+while (parser.Read())
+{
+ // Only get fields you need.
+ var field1 = parser[0];
+ var field2 = parser[1];
+
+ // Get all fields.
+ var record = parser.Record;
+}
+
Constructor parameter IFieldReader fieldReader
removed from all constructors.
// v19
+var parser = new CsvParser(fieldReader);
+
+// v20
+var parser = new CsvParser();
+
+Removed. Functionality moved into CsvWriter
.
Removed. Functionality moved into CsvParser
.
long ByteCount
.long CharCount
.int Count
.string this[int index]
.string[] Record
.string RawRecord
.int Row
.int RawRow
.string[] Read
to bool Read
.Task<string[]> ReadAsync
to Task<bool> ReadAsync
.Any classes that implement IParser
will need these changes.
ICsvParser Parser
.Any classes that implement IReader
will need these changes.
int ColumnCount
.int CurrentIndex
.string[] HeaderRecord
.IParser Parser
.Any classes that implement IReaderRow
will need these changes.
Removed. Functionality moved into IWriter
.
string[] HeaderRecord
.int Row
.int Index
.Removed. Functionality moved into CsvWriter
.
enum Caches
was removed. Modifying internal caches is not supported anymore.
+Any code referencing this should be removed.
ReadingContext
and WritingContext
was merged into a single CsvContext
.
+Anywhere either of these was used should change to CsvContext
.
Any place a Func
or Action
was used now has a dedicated delegate
.
+This should only affect classes that are inheriting ClassMap
+or CsvConfiguration
.
Class removed. Code was wrapped into CsvParser
.
Property char? NewLine
changed to string NewLine
.
// v20
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ NewLine = '\r',
+};
+
+// v21
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ NewLine = "\r",
+};
+
+
+ Name change to CsvMode
.
// v21
+ParserMode.RFC4180
+
+//v22
+CsvMode.RFC4180
+
+// v21
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = (field, context) => true,
+};
+
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = (field, context, row) => true,
+};
+
+EnumConverter
was changed to case sensitive by default.
If you want Enums to ignore case, you need to set a type converter option.
+Map(m => m.Property).TypeConverterOption.EnumIgnoreCase();
+
ProcessFieldBufferSize
.Any class that implements IParserConfiguration
will need these changes applied to it.
Mode
.Any class that implements IWriterConfiguration
will need these changes applied to it.
All the constructor parameters were removed in favor of using +property setters. Apply this change to any of the options.
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture, delimiter: ";");
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ Delimiter = ";",
+};
+
+All delegates now take in a single struct argument.
+BadDataFound
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ BadDataFound = (field, context) =>
+ {
+ Console.WriteLine($"field: {field}");
+ Console.WriteLine($"context: {context}");
+ },
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ BadDataFound = args =>
+ {
+ Console.WriteLine($"field: {args.Field}");
+ Console.WriteLine($"context: {args.Context}");
+ },
+};
+
+ConvertFromString
+// v22
+Map(m => m.Property).Convert(row => row.GetField(0));
+
+// v23
+Map(m => m.Property).Convert(args => args.Row.GetField(0));
+
+ConvertToString
+// v22
+Map(m => m.Property).Convert(value => value.ToString());
+
+// v23
+Map(m => m.Property).Convert(args => args.Value.ToString());
+
+GetConstructor
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ GetConstructor = classType => classType.GetConstructors().First(),
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ GetConstructor = args => args.ClassType.GetConstructors().First(),
+};
+
+GetDynamicPropertyName
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ GetDynamicPropertyName = (fieldIndex, context) => $"Field{fieldIndex}",
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ GetDynamicPropertyName = args => $"Field{args.FieldIndex}",
+};
+
+HeaderValidated
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ HeaderValidated = (invalidHeaders, context) => Console.WriteLine($"Invalid headers count: {invalidHeaders.Count}"),
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ HeaderValidated = args => Console.WriteLine($"Invalid headers count: {args.InvalidHeaders.Count}"),
+};
+
+MissingFieldFound
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ MissingFieldFound = (headerNames, index, context) => Console.WriteLine($"Missing field: {headerNames[0]}"),
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ MissingFieldFound = args => Console.WriteLine($"Missing field: {args.HeaderNames[0]}"),
+};
+
+PrepareHeaderForMatch
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ PrepareHeaderForMatch = (header, fieldIndex) => header.ToLower(),
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ PrepareHeaderForMatch = args => args.Header.ToLower(),
+};
+
+ReadingExceptionOccurred
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ReadingExceptionOccurred = exception =>
+ {
+ Console.WriteLine(exception.Message);
+ throw exception;
+ },
+};
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ReadingExceptionOccurred = args =>
+ {
+ Console.WriteLine(args.Exception.Message);
+ throw args.Exception;
+ },
+};
+
+ReferenceHeaderPrefix
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ReferenceHeaderPrefix = (memberType, memberName) => $"{memberName}.",
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ReferenceHeaderPrefix = args => $"{args.MemberName}.",
+};
+
+ShouldQuote
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = (field, fieldType, row) => true,
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldQuote = args => true,
+};
+
+ShouldSkipRecord
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldSkipRecord = (record) => record.Length == 0,
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldSkipRecord = args => args.Record.Length == 0,
+};
+
+ShouldUseConstructorParameters
+// v22
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldUseConstructorParameters = type => true,
+};
+
+// v23
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldUseConstructorParameters = args => true,
+};
+
+Validate
+// v22
+Map(m => m.Property).Validate(field => !string.IsNullOrEmpty(field));
+
+// v23
+Map(m => m.Property).Validate(args => !string.IsNullOrEmpty(args.Field));
+
+
+ // v23
+WriteConvertedField(field);
+
+// v24
+WriteConvertedField(field, fieldType);
+
+// v23
+WriteConvertedField(field);
+
+// v24
+WriteConvertedField(field, fieldType);
+
+
+ All delegate args had their non-parameterless constructor removed
+in favor of using init
.
BadDataFoundArgs
+// v25
+var args = new BadDataFoundArgs(field, rawRecord, context);
+
+// v26
+var args = new BadDataFoundArgs
+{
+ Field = field,
+ RawRecord = rawRecord,
+ Context = context,
+};
+
+ConvertFromStringArgs
+// v25
+var args = new ConvertFromStringArgs(row);
+
+// v26
+var args = new ConvertFromStringArgs
+{
+ Row = row,
+};
+
+ConvertToStringArgs
+// v25
+var args = new ConvertToStringArgs(value);
+
+// v26
+var args = new ConvertToStringArgs
+{
+ Value = value,
+};
+
+GetConstructorArgs
+// v25
+var args = new GetConstructorArgs(type);
+
+// v26
+var args = new GetConstructorArgs
+{
+ ClassType = type,
+};
+
+GetDynamicPropertyNameArgs
+// v25
+var args = new GetDynamicPropertyNameArgs(index, context);
+
+// v26
+var args = new GetDynamicPropertyNameArgs
+{
+ FieldIndex = index,
+ Context = context,
+};
+
+HeaderValidatedArgs
+// v25
+var args = new HeaderValidatedArgs(headers, context);
+
+// v26
+var args = new HeaderValidatedArgs
+{
+ InvalidHeaders = headers,
+ Context = context,
+};
+
+MissingFieldFoundArgs
+// v25
+var args = new MissingFieldFoundArgs(headerNames, index, context);
+
+// v26
+var args = new MissingFieldFoundArgs
+{
+ HeaderNames = headerNames,
+ Index = index,
+ Context = context,
+};
+
+PrepareHeaderForMatchArgs
+// v25
+var args = new PrepareHeaderForMatchArgs(header, fieldIndex);
+
+// v26
+var args = new PrepareHeaderForMatchArgs
+{
+ Header = header,
+ FieldIndex = fieldIndex,
+};
+
+ReadingExceptionOccurredArgs
+// v25
+var args = new ReadingExceptionOccurredArgs(exception);
+
+// v26
+var args = new ReadingExceptionOccurredArgs
+{
+ Exception = exception,
+};
+
+ReferenceHeaderPrefixArgs
+// v25
+var args = new ReferenceHeaderPrefixArgs(memberType, memberName);
+
+// v26
+var args = new ReferenceHeaderPrefixArgs
+{
+ MemberType = memberType,
+ MemberName = memberName,
+};
+
+ShouldQuoteArgs
+// v25
+var args = new ShouldQuoteArgs(field, fieldType, row);
+
+// v26
+var args = new ShouldQuoteArgs
+{
+ Field = field,
+ FieldType = fieldType,
+ Row = row,
+};
+
+ShouldSkipRecordArgs
+// v25
+var args = new ShouldSkipRecordArgs(record);
+
+// v26
+var args = new ShouldSkipRecordArgs
+{
+ Record = record,
+};
+
+ShouldUseConstructorParametersArgs
+// v25
+var args = new ShouldUseConstructorParametersArgs(parameterType);
+
+// v26
+var args = new ShouldUseConstructorParametersArgs
+{
+ ParameterType = parameterType,
+};
+
+ValidateArgs
+// v25
+var args = new ValidateArgs(field);
+
+// v26
+var args = new ValidateArgs
+{
+ Field = field,
+};
+
+
+ All delegates args objects have their init
accessor removed.
+Constructor parameters are used instead.
BadDataFoundArgs
+// v26
+var args = new BadDataFoundArgs
+{
+ Field = field,
+ RawRecord = rawRecord,
+ Context = context,
+};
+
+// v27
+var args = new BadDataFoundArgs(field, rawRecord, context);
+
+ConvertFromStringArgs
+// v26
+var args = new ConvertFromStringArgs
+{
+ Row = row,
+};
+
+// v27
+var args = new ConvertFromStringArgs(row);
+
+ConvertToStringArgs
+// v26
+var args = new ConvertToStringArgs
+{
+ Value = value,
+};
+
+// v27
+var args = new ConvertToStringArgs(value);
+
+GetConstructorArgs
+// v26
+var args = new GetConstructorArgs
+{
+ ClassType = type,
+};
+
+// v27
+var args = new GetConstructorArgs(type);
+
+GetDynamicPropertyNameArgs
+// v26
+var args = new GetDynamicPropertyNameArgs
+{
+ FieldIndex = index,
+ Context = context,
+};
+
+// v27
+var args = new GetDynamicPropertyNameArgs(index, context);
+
+HeaderValidatedArgs
+// v26
+var args = new HeaderValidatedArgs
+{
+ InvalidHeaders = headers,
+ Context = context,
+};
+
+// v27
+var args = new HeaderValidatedArgs(headers, context);
+
+MissingFieldFoundArgs
+// v26
+var args = new MissingFieldFoundArgs
+{
+ HeaderNames = headerNames,
+ Index = index,
+ Context = context,
+};
+
+// v27
+var args = new MissingFieldFoundArgs(headerNames, index, context);
+
+PrepareHeaderForMatchArgs
+// v26
+var args = new PrepareHeaderForMatchArgs
+{
+ Header = header,
+ FieldIndex = fieldIndex,
+};
+
+// v27
+var args = new PrepareHeaderForMatchArgs(header, fieldIndex);
+
+ReadingExceptionOccurredArgs
+// v26
+var args = new ReadingExceptionOccurredArgs
+{
+ Exception = exception,
+};
+
+// v27
+var args = new ReadingExceptionOccurredArgs(exception);
+
+ReferenceHeaderPrefixArgs
+// v26
+var args = new ReferenceHeaderPrefixArgs
+{
+ MemberType = memberType,
+ MemberName = memberName,
+};
+
+// v27
+var args = new ReferenceHeaderPrefixArgs(memberType, memberName);
+
+ShouldQuoteArgs
+// v26
+var args = new ShouldQuoteArgs
+{
+ Field = field,
+ FieldType = fieldType,
+ Row = row,
+};
+
+// v27
+var args = new ShouldQuoteArgs(field, fieldType, row);
+
+ShouldSkipRecordArgs
+// v26
+var args = new ShouldSkipRecordArgs
+{
+ Record = record,
+};
+
+// v27
+var args = new ShouldSkipRecordArgs(record);
+
+ShouldUseConstructorParametersArgs
+// v26
+var args = new ShouldUseConstructorParametersArgs
+{
+ ParameterType = parameterType,
+};
+
+// v27
+var args = new ShouldUseConstructorParametersArgs(parameterType);
+
+ValidateArgs
+// v26
+var args = new ValidateArgs
+{
+ Field = field,
+};
+
+// v27
+var args = new ValidateArgs(field);
+
+bool ExceptionMessagesContainRawData { get; }
.Any class that implements IParserConfiguration
will need these changes
+applied to it.
bool ExceptionMessagesContainRawData { get; }
.Any class that implements IParserConfiguration
will need these changes
+applied to it.
\t
from the array of default characters. If you are expecting \t
to be trimmed, you will need to add this to the
+whitespace characters.
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ WhiteSpaceChars = new[] { ' ', '\t' },
+};
+
+bool DetectDelimiter { get; }
.string[] DetectDelimiterValues { get; }
.Any class that implements IParserConfiguration
will need these changes
+applied to it.
Task WriteRecordsAsync<T>(IAsyncEnumerable<T> records, CancellationToken cancellationToken = default)
.
+This does not apply to projects that reference the net45
version of CsvHelper. Any class that implements IWriter
will need these changes applied to it.
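A sketch of calling the new overload (the async iterator below is hypothetical):

async Task Main()
{
    using (var writer = new StreamWriter("path\\to\\file.csv"))
    using (var csv = new CsvWriter(writer, CultureInfo.InvariantCulture))
    {
        await csv.WriteRecordsAsync(GetFoosAsync());
    }
}

// Hypothetical async source of records.
async IAsyncEnumerable<Foo> GetFoosAsync()
{
    yield return new Foo { Id = 1, Name = "one" };
    await Task.CompletedTask;
}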
string Delimiter { get; }
.Any class that implements IParser
will need these changes applied to it.
string[] ShouldSkipRecordArgs.Record
changed to IReaderRow ShouldSkipRecordArgs.Row
.// 27
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldSkipRecord = args => args.Record.Length < 10,
+};
+
+// 28
+
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldSkipRecord = args => args.Row.Parser.Record.Length < 10,
+};
+
+ConfigurationFunctions.ShouldSkipRecord
.null
can be used in place of this now, and is the default.
var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ ShouldSkipRecord = null
+};
+
+Implement the Validate
method.
bool CsvConfiguration.SanitizeForInjection
changed to InjectionOptions CsvConfiguration.InjectionOptions
.
// 28
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ SanitizeForInjection = true,
+}
+
+// 29
+var config = new CsvConfiguration(CultureInfo.InvariantCulture)
+{
+ InjectionOptions = InjectionOptions.Escape,
+}
+
bool IWriterConfiguration.SanitizeForInjection
changed to InjectionOptions IWriterConfiguration.InjectionOptions
.
+Any class that implements IWriterConfiguration
will need this changed.
GetDelimiter IParserConfiguration.GetDelimiter
was added.
+Any class that implements IParserConfiguration
will need to add this.
// 29
+throw new BadDataException(context);
+
+// 30
+throw new BadDataException(field, rawRecord, context);
+
+Any class that implements IParserConfiguration
will need to add property double MaxFixFieldSize { get; }
.
+Any class that implements IParserConfiguration
will need to add property bool LeaveOpen { get; }
.
Any class that implements IWriterConfiguration will need to add property double MaxFixFieldSize { get; }. Any class that implements IWriterConfiguration will need to add property bool LeaveOpen { get; }.
// 29
+var args = new ValidateArgs(field);
+
+// 30
+var args = new ValidateArgs(field, row);
+
+
+