@@ -968,15 +968,113 @@ extension Driver {
    return tokens
  }

+  // https://docs.microsoft.com/en-us/previous-versions//17w5ykft(v=vs.85)?redirectedfrom=MSDN
+  private static func tokenizeWindowsResponseFile(_ content: String) -> [String] {
+    let whitespace: [Character] = [" ", "\t", "\r", "\n", "\0"]
+
+    var content = content
+    var tokens: [String] = []
+    var token: String = ""
+    var quoted: Bool = false
+
+    while !content.isEmpty {
+      // Eat whitespace at the beginning
+      if token.isEmpty {
+        if let end = content.firstIndex(where: { !whitespace.contains($0) }) {
+          let count = content.distance(from: content.startIndex, to: end)
+          content.removeFirst(count)
+        }
+
+        // Stop if this was trailing whitespace.
+        if content.isEmpty { break }
+      }
+
+      // Treat whitespace, double quotes, and backslashes as special characters.
+      if let next = content.firstIndex(where: { (quoted ? ["\\", "\""] : [" ", "\t", "\r", "\n", "\0", "\\", "\""]).contains($0) }) {
+        let count = content.distance(from: content.startIndex, to: next)
+        token.append(contentsOf: content[..<next])
+        content.removeFirst(count)
+
+        switch content.first {
+        case " ", "\t", "\r", "\n", "\0":
+          tokens.append(token)
+          token = ""
+          content.removeFirst(1)
+
+        case "\\":
+          // Backslashes are interpreted in a special manner due to use as both
+          // a path separator and an escape character. Consume runs of
+          // backslashes and following double quote if escaped.
+          //
+          //  - If an even number of backslashes is followed by a double quote,
+          //    one backslash is emitted for each pair, and the last double quote
+          //    remains unconsumed. The quote will be processed as the start or
+          //    end of a quoted string by the tokenizer.
+          //
+          //  - If an odd number of backslashes is followed by a double quote,
+          //    one backslash is emitted for each pair, and a double quote is
+          //    emitted for the trailing backslash and quote pair. The double
+          //    quote is consumed.
+          //
+          //  - Otherwise, backslashes are treated literally.
+          if let next = content.firstIndex(where: { $0 != "\\" }) {
+            let count = content.distance(from: content.startIndex, to: next)
+            if content[next] == "\"" {
+              token.append(String(repeating: "\\", count: count / 2))
+              content.removeFirst(count)
+
+              if count % 2 != 0 {
+                token.append("\"")
+                content.removeFirst(1)
+              }
+            } else {
+              token.append(String(repeating: "\\", count: count))
+              content.removeFirst(count)
+            }
+          } else {
+            token.append(String(repeating: "\\", count: content.count))
+            content.removeFirst(content.count)
+          }
+
+        case "\"":
+          content.removeFirst(1)
+
+          if quoted, content.first == "\"" {
+            // Consecutive double quotes inside a quoted string imply one quote
+            token.append("\"")
+            content.removeFirst(1)
+          }
+
+          quoted.toggle()
+
+        default:
+          fatalError("unexpected character '\(content.first!)'")
+        }
+      } else {
+        // Consume to end of content.
+        token.append(content)
+        content.removeFirst(content.count)
+        break
+      }
+    }
+
+    if !token.isEmpty { tokens.append(token) }
+    return tokens.filter { !$0.isEmpty }
+  }
+
  /// Tokenize each line of the response file, omitting empty lines.
  ///
  /// - Parameter content: response file's content to be tokenized.
  private static func tokenizeResponseFile(_ content: String) -> [String] {
-    #if !canImport(Darwin) && !os(Linux) && !os(Android) && !os(OpenBSD)
+    #if !canImport(Darwin) && !os(Linux) && !os(Android) && !os(OpenBSD) && !os(Windows)
    #warning("Response file tokenization unimplemented for platform; behavior may be incorrect")
    #endif
+    #if os(Windows)
+    return tokenizeWindowsResponseFile(content)
+    #else
    return content.split { $0 == "\n" || $0 == "\r\n" }
-                  .flatMap { tokenizeResponseFileLine($0) }
+      .flatMap { tokenizeResponseFileLine($0) }
+    #endif
  }

  /// Resolves the absolute path for a response file.
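For readers unfamiliar with the Windows quoting rules the new tokenizer implements, the sketch below shows the splits expected for the canonical examples from the MSDN page cited in the diff. It is written as an XCTest case and rests on assumptions not present in the change itself: that `tokenizeWindowsResponseFile` is reachable from a test target via `@testable import SwiftDriver` (in the diff it is declared `private`), and the test class and method names, which are purely illustrative.

```swift
import XCTest
@testable import SwiftDriver  // assumption: module name and visibility allow this call

final class WindowsResponseFileTokenizationTests: XCTestCase {  // hypothetical test case
  func testMSDNExamples() {
    // "a b c" d e  ->  [a b c] [d] [e]   (quotes group whitespace into one token)
    XCTAssertEqual(Driver.tokenizeWindowsResponseFile(#""a b c" d e"#),
                   ["a b c", "d", "e"])

    // a\\\"b c d  ->  [a\"b] [c] [d]   (odd backslash run: each pair emits one
    //                                   backslash, the last backslash escapes the quote)
    XCTAssertEqual(Driver.tokenizeWindowsResponseFile(#"a\\\"b c d"#),
                   [#"a\"b"#, "c", "d"])

    // a\\\\"b c" d e  ->  [a\\b c] [d] [e]   (even backslash run: each pair emits one
    //                                         backslash, the quote starts a quoted span)
    XCTAssertEqual(Driver.tokenizeWindowsResponseFile(#"a\\\\"b c" d e"#),
                   [#"a\\b c"#, "d", "e"])
  }
}
```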