| 1 | /* |
| 2 | * Copyright (C) 2019 Apple Inc. All rights reserved. |
| 3 | * |
| 4 | * Redistribution and use in source and binary forms, with or without |
| 5 | * modification, are permitted provided that the following conditions |
| 6 | * are met: |
| 7 | * 1. Redistributions of source code must retain the above copyright |
| 8 | * notice, this list of conditions and the following disclaimer. |
| 9 | * 2. Redistributions in binary form must reproduce the above copyright |
| 10 | * notice, this list of conditions and the following disclaimer in the |
| 11 | * documentation and/or other materials provided with the distribution. |
| 12 | * |
| 13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' |
| 14 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, |
| 15 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR |
| 16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS |
| 17 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
| 18 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
| 19 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
| 20 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
| 21 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
| 22 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF |
| 23 | * THE POSSIBILITY OF SUCH DAMAGE. |
| 24 | */ |
| 25 | |
| 26 | #include "config.h" |
| 27 | #include "WHLSLParser.h" |
| 28 | |
| 29 | #if ENABLE(WEBGPU) |
| 30 | |
| 31 | #include "WHLSLAddressSpace.h" |
| 32 | #include "WHLSLEntryPointType.h" |
| 33 | #include <wtf/dtoa.h> |
| 34 | #include <wtf/text/StringBuilder.h> |
| 35 | #include <wtf/text/StringConcatenate.h> |
| 36 | |
| 37 | namespace WebCore { |
| 38 | |
| 39 | namespace WHLSL { |
| 40 | |
| 41 | // FIXME: Return a better error code from this, and report it to JavaScript. |
| 42 | auto Parser::parse(Program& program, StringView stringView, Mode mode) -> Optional<Error> |
| 43 | { |
| 44 | m_lexer = Lexer(stringView); |
| 45 | m_mode = mode; |
| 46 | |
| 47 | while (!m_lexer.isFullyConsumed()) { |
| 48 | if (tryType(Lexer::Token::Type::Semicolon)) { |
| 49 | m_lexer.consumeToken(); |
| 50 | continue; |
| 51 | } |
| 52 | |
| 53 | { |
| 54 | auto typeDefinition = backtrackingScope<Expected<AST::TypeDefinition, Error>>([&]() { |
| 55 | return parseTypeDefinition(); |
| 56 | }); |
| 57 | if (typeDefinition) { |
| 58 | auto success = program.append(WTFMove(*typeDefinition)); |
| 59 | if (!success) |
| 60 | return WTF::nullopt; |
| 61 | continue; |
| 62 | } |
| 63 | } |
| 64 | |
| 65 | { |
| 66 | auto structureDefinition = backtrackingScope<Expected<AST::StructureDefinition, Error>>([&]() { |
| 67 | return parseStructureDefinition(); |
| 68 | }); |
| 69 | if (structureDefinition) { |
| 70 | auto success = program.append(WTFMove(*structureDefinition)); |
| 71 | if (!success) |
| 72 | return WTF::nullopt; |
| 73 | continue; |
| 74 | } |
| 75 | } |
| 76 | |
| 77 | { |
| 78 | auto enumerationDefinition = backtrackingScope<Expected<AST::EnumerationDefinition, Error>>([&]() { |
| 79 | return parseEnumerationDefinition(); |
| 80 | }); |
| 81 | if (enumerationDefinition) { |
| 82 | auto success = program.append(WTFMove(*enumerationDefinition)); |
| 83 | if (!success) |
| 84 | return WTF::nullopt; |
| 85 | continue; |
| 86 | } |
| 87 | } |
| 88 | |
| 89 | Optional<Error> error; |
| 90 | { |
| 91 | auto functionDefinition = backtrackingScope<Expected<AST::FunctionDefinition, Error>>([&]() { |
| 92 | return parseFunctionDefinition(); |
| 93 | }); |
| 94 | if (functionDefinition) { |
| 95 | auto success = program.append(WTFMove(*functionDefinition)); |
| 96 | if (!success) |
| 97 | return WTF::nullopt; |
| 98 | continue; |
| 99 | } |
| 100 | error = functionDefinition.error(); |
| 101 | } |
| 102 | |
| 103 | if (m_mode == Mode::StandardLibrary) { |
| 104 | auto nativeFunctionDeclaration = backtrackingScope<Expected<AST::NativeFunctionDeclaration, Error>>([&]() { |
| 105 | return parseNativeFunctionDeclaration(); |
| 106 | }); |
| 107 | if (nativeFunctionDeclaration) { |
| 108 | auto success = program.append(WTFMove(*nativeFunctionDeclaration)); |
| 109 | if (!success) |
| 110 | return WTF::nullopt; |
| 111 | continue; |
| 112 | } |
| 113 | } |
| 114 | |
| 115 | if (m_mode == Mode::StandardLibrary) { |
| 116 | auto nativeTypeDeclaration = backtrackingScope<Expected<AST::NativeTypeDeclaration, Error>>([&]() { |
| 117 | return parseNativeTypeDeclaration(); |
| 118 | }); |
| 119 | if (nativeTypeDeclaration) { |
| 120 | auto success = program.append(WTFMove(*nativeTypeDeclaration)); |
| 121 | if (!success) |
| 122 | return WTF::nullopt; |
| 123 | continue; |
| 124 | } |
| 125 | } |
| 126 | |
| 127 | return WTFMove(*error); |
| 128 | } |
| 129 | return WTF::nullopt; |
| 130 | } |
| 131 | |
| 132 | auto Parser::fail(const String& message, TryToPeek tryToPeek) -> Unexpected<Error> |
| 133 | { |
| 134 | if (tryToPeek == TryToPeek::Yes) { |
| 135 | if (auto nextToken = peek()) |
| 136 | return Unexpected<Error>(Error(m_lexer.errorString(*nextToken, message))); |
| 137 | } |
| 138 | return Unexpected<Error>(Error(makeString("Cannot lex: " , message))); |
| 139 | } |
| 140 | |
| 141 | auto Parser::peek() -> Expected<Lexer::Token, Error> |
| 142 | { |
| 143 | if (auto token = m_lexer.consumeToken()) { |
| 144 | m_lexer.unconsumeToken(Lexer::Token(*token)); |
| 145 | return *token; |
| 146 | } |
| 147 | return fail("Cannot consume token"_str , TryToPeek::No); |
| 148 | } |
| 149 | |
| 150 | Optional<Lexer::Token> Parser::tryType(Lexer::Token::Type type) |
| 151 | { |
| 152 | if (auto token = m_lexer.consumeToken()) { |
| 153 | if (token->type == type) |
| 154 | return token; |
| 155 | m_lexer.unconsumeToken(Lexer::Token(*token)); |
| 156 | } |
| 157 | return WTF::nullopt; |
| 158 | } |
| 159 | |
| 160 | Optional<Lexer::Token> Parser::tryTypes(Vector<Lexer::Token::Type> types) |
| 161 | { |
| 162 | if (auto token = m_lexer.consumeToken()) { |
| 163 | if (std::find(types.begin(), types.end(), token->type) != types.end()) |
| 164 | return token; |
| 165 | m_lexer.unconsumeToken(Lexer::Token(*token)); |
| 166 | } |
| 167 | return WTF::nullopt; |
| 168 | } |
| 169 | |
| 170 | auto Parser::consumeType(Lexer::Token::Type type) -> Expected<Lexer::Token, Error> |
| 171 | { |
| 172 | if (auto token = m_lexer.consumeToken()) { |
| 173 | if (token->type == type) |
| 174 | return *token; |
| 175 | return fail(makeString("Unexpected token (expected " , Lexer::Token::typeName(type), " got " , Lexer::Token::typeName(token->type), ")" )); |
| 176 | } |
| 177 | return fail(makeString("Cannot consume token (expected " , Lexer::Token::typeName(type), ")" )); |
| 178 | } |
| 179 | |
| 180 | auto Parser::consumeTypes(Vector<Lexer::Token::Type> types) -> Expected<Lexer::Token, Error> |
| 181 | { |
| 182 | auto buildExpectedString = [&]() -> String { |
| 183 | StringBuilder builder; |
| 184 | builder.append("[" ); |
| 185 | for (unsigned i = 0; i < types.size(); ++i) { |
| 186 | if (i > 0) |
| 187 | builder.append(", " ); |
| 188 | builder.append(Lexer::Token::typeName(types[i])); |
| 189 | } |
| 190 | builder.append("]" ); |
| 191 | return builder.toString(); |
| 192 | }; |
| 193 | |
| 194 | if (auto token = m_lexer.consumeToken()) { |
| 195 | if (std::find(types.begin(), types.end(), token->type) != types.end()) |
| 196 | return *token; |
| 197 | return fail(makeString("Unexpected token (expected one of " , buildExpectedString(), " got " , Lexer::Token::typeName(token->type), ")" )); |
| 198 | } |
| 199 | return fail(makeString("Cannot consume token (expected " , buildExpectedString(), ")" )); |
| 200 | } |
| 201 | |
| 202 | static int digitValue(UChar character) |
| 203 | { |
| 204 | if (character >= '0' && character <= '9') |
| 205 | return character - '0'; |
| 206 | if (character >= 'a' && character <= 'f') |
| 207 | return character - 'a' + 10; |
| 208 | return character - 'A' + 10; |
| 209 | } |
| 210 | |
| 211 | static Expected<int, Parser::Error> intLiteralToInt(StringView text) |
| 212 | { |
| 213 | bool negate = false; |
| 214 | if (text.startsWith("-"_str )) { |
| 215 | negate = true; |
| 216 | text = text.substring(1); |
| 217 | } |
| 218 | int base = 10; |
| 219 | if (text.startsWith("0x"_str )) { |
| 220 | text = text.substring(2); |
| 221 | base = 16; |
| 222 | } |
| 223 | |
| 224 | unsigned result = 0; |
| 225 | for (auto codePoint : text.codePoints()) { |
| 226 | unsigned digit = digitValue(codePoint); |
| 227 | auto previous = result; |
| 228 | result = result * base + digit; |
| 229 | if (result < previous) |
| 230 | return Unexpected<Parser::Error>(Parser::Error(makeString("int literal " , text, " is out of bounds" ))); |
| 231 | } |
| 232 | if (negate) { |
| 233 | static_assert(std::numeric_limits<long long int>::min() < std::numeric_limits<int>::min(), "long long needs to be bigger than an int" ); |
| 234 | if (static_cast<long long>(result) > std::abs(static_cast<long long>(std::numeric_limits<int>::min()))) |
| 235 | return Unexpected<Parser::Error>(Parser::Error(makeString("int literal " , text, " is out of bounds" ))); |
| 236 | return { static_cast<int>(static_cast<long long>(result) * 1) }; |
| 237 | } |
| 238 | if (result > static_cast<unsigned>(std::numeric_limits<int>::max())) |
| 239 | return Unexpected<Parser::Error>(Parser::Error(makeString("int literal " , text, " is out of bounds" ))); |
| 240 | return { static_cast<int>(result) }; |
| 241 | } |
| 242 | |
| 243 | static Expected<unsigned, Parser::Error> uintLiteralToUint(StringView text) |
| 244 | { |
| 245 | unsigned base = 10; |
| 246 | if (text.startsWith("0x"_str )) { |
| 247 | text = text.substring(2); |
| 248 | base = 16; |
| 249 | } |
| 250 | ASSERT(text.endsWith("u" )); |
| 251 | text = text.substring(0, text.length() - 1); |
| 252 | unsigned result = 0; |
| 253 | for (auto codePoint : text.codePoints()) { |
| 254 | unsigned digit = digitValue(codePoint); |
| 255 | auto previous = result; |
| 256 | result = result * base + digit; |
| 257 | if (result < previous) |
| 258 | return Unexpected<Parser::Error>(Parser::Error(makeString("uint literal " , text, " is out of bounds" ))); |
| 259 | } |
| 260 | return { result }; |
| 261 | } |
| 262 | |
| 263 | static Expected<float, Parser::Error> floatLiteralToFloat(StringView text) |
| 264 | { |
| 265 | size_t parsedLength; |
| 266 | auto result = parseDouble(text, parsedLength); |
| 267 | if (parsedLength != text.length()) |
| 268 | return Unexpected<Parser::Error>(Parser::Error(makeString("Cannot parse float " , text))); |
| 269 | return static_cast<float>(result); |
| 270 | } |
| 271 | |
| 272 | auto Parser::consumeIntegralLiteral() -> Expected<Variant<int, unsigned>, Error> |
| 273 | { |
| 274 | auto integralLiteralToken = consumeTypes({ Lexer::Token::Type::IntLiteral, Lexer::Token::Type::UintLiteral }); |
| 275 | if (!integralLiteralToken) |
| 276 | return Unexpected<Error>(integralLiteralToken.error()); |
| 277 | |
| 278 | switch (integralLiteralToken->type) { |
| 279 | case Lexer::Token::Type::IntLiteral: { |
| 280 | auto result = intLiteralToInt(integralLiteralToken->stringView); |
| 281 | if (result) |
| 282 | return {{ *result }}; |
| 283 | return Unexpected<Error>(result.error()); |
| 284 | } |
| 285 | default: { |
| 286 | ASSERT(integralLiteralToken->type == Lexer::Token::Type::UintLiteral); |
| 287 | auto result = uintLiteralToUint(integralLiteralToken->stringView); |
| 288 | if (result) |
| 289 | return {{ *result }}; |
| 290 | return Unexpected<Error>(result.error()); |
| 291 | } |
| 292 | } |
| 293 | } |
| 294 | |
| 295 | auto Parser::consumeNonNegativeIntegralLiteral() -> Expected<unsigned, Error> |
| 296 | { |
| 297 | auto integralLiteral = consumeIntegralLiteral(); |
| 298 | if (!integralLiteral) |
| 299 | return Unexpected<Error>(integralLiteral.error()); |
| 300 | auto result = WTF::visit(WTF::makeVisitor([](int x) -> Optional<unsigned> { |
| 301 | if (x < 0) |
| 302 | return WTF::nullopt; |
| 303 | return x; |
| 304 | }, [](unsigned x) -> Optional<unsigned> { |
| 305 | return x; |
| 306 | }), *integralLiteral); |
| 307 | if (result) |
| 308 | return *result; |
| 309 | return fail("int literal is negative"_str ); |
| 310 | } |
| 311 | |
| 312 | static Expected<unsigned, Parser::Error> recognizeSimpleUnsignedInteger(StringView stringView) |
| 313 | { |
| 314 | unsigned result = 0; |
| 315 | if (stringView.length() < 1) |
| 316 | return Unexpected<Parser::Error>(Parser::Error(makeString("Simple unsigned literal " , stringView, " is too short" ))); |
| 317 | for (auto codePoint : stringView.codePoints()) { |
| 318 | if (codePoint < '0' || codePoint > '9') |
| 319 | return Unexpected<Parser::Error>(Parser::Error(makeString("Simple unsigned literal " , stringView, " isn't of the form [0-9]+" ))); |
| 320 | auto previous = result; |
| 321 | result = result * 10 + (codePoint - '0'); |
| 322 | if (result < previous) |
| 323 | return Unexpected<Parser::Error>(Parser::Error(makeString("Simple unsigned literal " , stringView, " is out of bounds" ))); |
| 324 | } |
| 325 | return result; |
| 326 | } |
| 327 | |
| 328 | auto Parser::parseConstantExpression() -> Expected<AST::ConstantExpression, Error> |
| 329 | { |
| 330 | auto type = consumeTypes({ |
| 331 | Lexer::Token::Type::IntLiteral, |
| 332 | Lexer::Token::Type::UintLiteral, |
| 333 | Lexer::Token::Type::FloatLiteral, |
| 334 | Lexer::Token::Type::Null, |
| 335 | Lexer::Token::Type::True, |
| 336 | Lexer::Token::Type::False, |
| 337 | Lexer::Token::Type::Identifier, |
| 338 | }); |
| 339 | if (!type) |
| 340 | return Unexpected<Error>(type.error()); |
| 341 | |
| 342 | switch (type->type) { |
| 343 | case Lexer::Token::Type::IntLiteral: { |
| 344 | auto value = intLiteralToInt(type->stringView); |
| 345 | if (!value) |
| 346 | return Unexpected<Error>(value.error()); |
| 347 | return {{ AST::IntegerLiteral(WTFMove(*type), *value) }}; |
| 348 | } |
| 349 | case Lexer::Token::Type::UintLiteral: { |
| 350 | auto value = uintLiteralToUint(type->stringView); |
| 351 | if (!value) |
| 352 | return Unexpected<Error>(value.error()); |
| 353 | return {{ AST::UnsignedIntegerLiteral(WTFMove(*type), *value) }}; |
| 354 | } |
| 355 | case Lexer::Token::Type::FloatLiteral: { |
| 356 | auto value = floatLiteralToFloat(type->stringView); |
| 357 | if (!value) |
| 358 | return Unexpected<Error>(value.error()); |
| 359 | return {{ AST::FloatLiteral(WTFMove(*type), *value) }}; |
| 360 | } |
| 361 | case Lexer::Token::Type::Null: |
| 362 | return { AST::NullLiteral(WTFMove(*type)) }; |
| 363 | case Lexer::Token::Type::True: |
| 364 | return { AST::BooleanLiteral(WTFMove(*type), true) }; |
| 365 | case Lexer::Token::Type::False: |
| 366 | return { AST::BooleanLiteral(WTFMove(*type), false) }; |
| 367 | default: { |
| 368 | ASSERT(type->type == Lexer::Token::Type::Identifier); |
| 369 | auto origin = consumeType(Lexer::Token::Type::FullStop); |
| 370 | if (!origin) |
| 371 | return Unexpected<Error>(origin.error()); |
| 372 | auto next = consumeType(Lexer::Token::Type::Identifier); |
| 373 | if (!next) |
| 374 | return Unexpected<Error>(next.error()); |
| 375 | return { AST::EnumerationMemberLiteral(WTFMove(*origin), type->stringView.toString(), next->stringView.toString()) }; |
| 376 | } |
| 377 | } |
| 378 | } |
| 379 | |
| 380 | auto Parser::parseTypeArgument() -> Expected<AST::TypeArgument, Error> |
| 381 | { |
| 382 | auto constantExpression = backtrackingScope<Expected<AST::ConstantExpression, Error>>([&]() { |
| 383 | return parseConstantExpression(); |
| 384 | }); |
| 385 | if (constantExpression) |
| 386 | return AST::TypeArgument(WTFMove(*constantExpression)); |
| 387 | auto result = consumeType(Lexer::Token::Type::Identifier); |
| 388 | if (!result) |
| 389 | return Unexpected<Error>(result.error()); |
| 390 | return AST::TypeArgument(makeUniqueRef<AST::TypeReference>(Lexer::Token(*result), result->stringView.toString(), AST::TypeArguments())); |
| 391 | } |
| 392 | |
| 393 | auto Parser::parseTypeArguments() -> Expected<AST::TypeArguments, Error> |
| 394 | { |
| 395 | auto typeArguments = backtrackingScope<Optional<AST::TypeArguments>>([&]() -> Optional<AST::TypeArguments> { |
| 396 | auto lessThanSign = consumeType(Lexer::Token::Type::LessThanSign); |
| 397 | if (!lessThanSign) |
| 398 | return WTF::nullopt; |
| 399 | AST::TypeArguments typeArguments; |
| 400 | auto typeArgument = parseTypeArgument(); |
| 401 | if (!typeArgument) |
| 402 | return WTF::nullopt; |
| 403 | typeArguments.append(WTFMove(*typeArgument)); |
| 404 | while (tryType(Lexer::Token::Type::Comma)) { |
| 405 | auto typeArgument = parseTypeArgument(); |
| 406 | if (!typeArgument) |
| 407 | return WTF::nullopt; |
| 408 | typeArguments.append(WTFMove(*typeArgument)); |
| 409 | } |
| 410 | auto greaterThanSign = consumeType(Lexer::Token::Type::GreaterThanSign); |
| 411 | if (!greaterThanSign) |
| 412 | return WTF::nullopt; |
| 413 | return typeArguments; |
| 414 | }); |
| 415 | if (typeArguments) |
| 416 | return WTFMove(*typeArguments); |
| 417 | |
| 418 | typeArguments = backtrackingScope<Optional<AST::TypeArguments>>([&]() -> Optional<AST::TypeArguments> { |
| 419 | auto lessThanSign = consumeType(Lexer::Token::Type::LessThanSign); |
| 420 | if (!lessThanSign) |
| 421 | return WTF::nullopt; |
| 422 | auto greaterThanSign = consumeType(Lexer::Token::Type::GreaterThanSign); |
| 423 | if (!greaterThanSign) |
| 424 | return WTF::nullopt; |
| 425 | return {{ }}; |
| 426 | }); |
| 427 | if (typeArguments) |
| 428 | return WTFMove(*typeArguments); |
| 429 | |
| 430 | return AST::TypeArguments(); |
| 431 | } |
| 432 | |
| 433 | auto Parser::parseTypeSuffixAbbreviated() -> Expected<TypeSuffixAbbreviated, Error> |
| 434 | { |
| 435 | auto token = consumeTypes({ Lexer::Token::Type::Star, Lexer::Token::Type::SquareBracketPair, Lexer::Token::Type::LeftSquareBracket }); |
| 436 | if (!token) |
| 437 | return Unexpected<Error>(token.error()); |
| 438 | if (token->type == Lexer::Token::Type::LeftSquareBracket) { |
| 439 | auto numElements = consumeNonNegativeIntegralLiteral(); |
| 440 | if (!numElements) |
| 441 | return Unexpected<Error>(numElements.error()); |
| 442 | auto rightSquareBracket = consumeType(Lexer::Token::Type::RightSquareBracket); |
| 443 | if (!rightSquareBracket) |
| 444 | return Unexpected<Error>(rightSquareBracket.error()); |
| 445 | return {{ *token, *numElements }}; |
| 446 | } |
| 447 | return {{ *token, WTF::nullopt }}; |
| 448 | } |
| 449 | |
| 450 | auto Parser::parseTypeSuffixNonAbbreviated() -> Expected<TypeSuffixNonAbbreviated, Error> |
| 451 | { |
| 452 | auto token = consumeTypes({ Lexer::Token::Type::Star, Lexer::Token::Type::SquareBracketPair, Lexer::Token::Type::LeftSquareBracket }); |
| 453 | if (!token) |
| 454 | return Unexpected<Error>(token.error()); |
| 455 | if (token->type == Lexer::Token::Type::LeftSquareBracket) { |
| 456 | auto numElements = consumeNonNegativeIntegralLiteral(); |
| 457 | if (!numElements) |
| 458 | return Unexpected<Error>(numElements.error()); |
| 459 | auto rightSquareBracket = consumeType(Lexer::Token::Type::RightSquareBracket); |
| 460 | if (!rightSquareBracket) |
| 461 | return Unexpected<Error>(rightSquareBracket.error()); |
| 462 | return {{ *token, WTF::nullopt, *numElements }}; |
| 463 | } |
| 464 | auto addressSpaceToken = consumeTypes({ Lexer::Token::Type::Constant, Lexer::Token::Type::Device, Lexer::Token::Type::Threadgroup, Lexer::Token::Type::Thread}); |
| 465 | if (!addressSpaceToken) |
| 466 | return Unexpected<Error>(addressSpaceToken.error()); |
| 467 | AST::AddressSpace addressSpace; |
| 468 | switch (addressSpaceToken->type) { |
| 469 | case Lexer::Token::Type::Constant: |
| 470 | addressSpace = AST::AddressSpace::Constant; |
| 471 | break; |
| 472 | case Lexer::Token::Type::Device: |
| 473 | addressSpace = AST::AddressSpace::Device; |
| 474 | break; |
| 475 | case Lexer::Token::Type::Threadgroup: |
| 476 | addressSpace = AST::AddressSpace::Threadgroup; |
| 477 | break; |
| 478 | default: |
| 479 | ASSERT(addressSpaceToken->type == Lexer::Token::Type::Thread); |
| 480 | addressSpace = AST::AddressSpace::Thread; |
| 481 | break; |
| 482 | } |
| 483 | return {{ *token, { addressSpace }, WTF::nullopt }}; |
| 484 | } |
| 485 | |
| 486 | auto Parser::parseAddressSpaceType() -> Expected<UniqueRef<AST::UnnamedType>, Error> |
| 487 | { |
| 488 | auto addressSpaceToken = consumeTypes({ Lexer::Token::Type::Constant, Lexer::Token::Type::Device, Lexer::Token::Type::Threadgroup, Lexer::Token::Type::Thread}); |
| 489 | if (!addressSpaceToken) |
| 490 | return Unexpected<Error>(addressSpaceToken.error()); |
| 491 | AST::AddressSpace addressSpace; |
| 492 | switch (addressSpaceToken->type) { |
| 493 | case Lexer::Token::Type::Constant: |
| 494 | addressSpace = AST::AddressSpace::Constant; |
| 495 | break; |
| 496 | case Lexer::Token::Type::Device: |
| 497 | addressSpace = AST::AddressSpace::Device; |
| 498 | break; |
| 499 | case Lexer::Token::Type::Threadgroup: |
| 500 | addressSpace = AST::AddressSpace::Threadgroup; |
| 501 | break; |
| 502 | default: |
| 503 | ASSERT(addressSpaceToken->type == Lexer::Token::Type::Thread); |
| 504 | addressSpace = AST::AddressSpace::Thread; |
| 505 | break; |
| 506 | } |
| 507 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 508 | if (!name) |
| 509 | return Unexpected<Error>(name.error()); |
| 510 | auto typeArguments = parseTypeArguments(); |
| 511 | if (!typeArguments) |
| 512 | return Unexpected<Error>(typeArguments.error()); |
| 513 | |
| 514 | auto constructTypeFromSuffixAbbreviated = [&](const TypeSuffixAbbreviated& typeSuffixAbbreviated, UniqueRef<AST::UnnamedType>&& previous) -> UniqueRef<AST::UnnamedType> { |
| 515 | switch (typeSuffixAbbreviated.token.type) { |
| 516 | case Lexer::Token::Type::Star: |
| 517 | return { makeUniqueRef<AST::PointerType>(Lexer::Token(typeSuffixAbbreviated.token), addressSpace, WTFMove(previous)) }; |
| 518 | case Lexer::Token::Type::SquareBracketPair: |
| 519 | return { makeUniqueRef<AST::ArrayReferenceType>(Lexer::Token(typeSuffixAbbreviated.token), addressSpace, WTFMove(previous)) }; |
| 520 | default: |
| 521 | ASSERT(typeSuffixAbbreviated.token.type == Lexer::Token::Type::LeftSquareBracket); |
| 522 | return { makeUniqueRef<AST::ArrayType>(Lexer::Token(typeSuffixAbbreviated.token), WTFMove(previous), *typeSuffixAbbreviated.numElements) }; |
| 523 | } |
| 524 | }; |
| 525 | |
| 526 | auto firstTypeSuffixAbbreviated = parseTypeSuffixAbbreviated(); |
| 527 | if (!firstTypeSuffixAbbreviated) |
| 528 | return Unexpected<Error>(firstTypeSuffixAbbreviated.error()); |
| 529 | UniqueRef<AST::UnnamedType> result = makeUniqueRef<AST::TypeReference>(WTFMove(*addressSpaceToken), name->stringView.toString(), WTFMove(*typeArguments)); |
| 530 | auto next = constructTypeFromSuffixAbbreviated(*firstTypeSuffixAbbreviated, WTFMove(result)); |
| 531 | result = WTFMove(next); |
| 532 | while (true) { |
| 533 | auto typeSuffixAbbreviated = backtrackingScope<Expected<TypeSuffixAbbreviated, Error>>([&]() { |
| 534 | return parseTypeSuffixAbbreviated(); |
| 535 | }); |
| 536 | if (!typeSuffixAbbreviated) |
| 537 | break; |
| 538 | // FIXME: The nesting here might be in the wrong order. |
| 539 | next = constructTypeFromSuffixAbbreviated(*typeSuffixAbbreviated, WTFMove(result)); |
| 540 | result = WTFMove(next); |
| 541 | } |
| 542 | |
| 543 | return WTFMove(result); |
| 544 | } |
| 545 | |
| 546 | auto Parser::parseNonAddressSpaceType() -> Expected<UniqueRef<AST::UnnamedType>, Error> |
| 547 | { |
| 548 | auto origin = peek(); |
| 549 | if (!origin) |
| 550 | return Unexpected<Error>(origin.error()); |
| 551 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 552 | if (!name) |
| 553 | return Unexpected<Error>(name.error()); |
| 554 | auto typeArguments = parseTypeArguments(); |
| 555 | if (!typeArguments) |
| 556 | return Unexpected<Error>(typeArguments.error()); |
| 557 | |
| 558 | auto constructTypeFromSuffixNonAbbreviated = [&](const TypeSuffixNonAbbreviated& typeSuffixNonAbbreviated, UniqueRef<AST::UnnamedType>&& previous) -> UniqueRef<AST::UnnamedType> { |
| 559 | switch (typeSuffixNonAbbreviated.token.type) { |
| 560 | case Lexer::Token::Type::Star: |
| 561 | return { makeUniqueRef<AST::PointerType>(Lexer::Token(typeSuffixNonAbbreviated.token), *typeSuffixNonAbbreviated.addressSpace, WTFMove(previous)) }; |
| 562 | case Lexer::Token::Type::SquareBracketPair: |
| 563 | return { makeUniqueRef<AST::ArrayReferenceType>(Lexer::Token(typeSuffixNonAbbreviated.token), *typeSuffixNonAbbreviated.addressSpace, WTFMove(previous)) }; |
| 564 | default: |
| 565 | ASSERT(typeSuffixNonAbbreviated.token.type == Lexer::Token::Type::LeftSquareBracket); |
| 566 | return { makeUniqueRef<AST::ArrayType>(Lexer::Token(typeSuffixNonAbbreviated.token), WTFMove(previous), *typeSuffixNonAbbreviated.numElements) }; |
| 567 | } |
| 568 | }; |
| 569 | |
| 570 | UniqueRef<AST::UnnamedType> result = makeUniqueRef<AST::TypeReference>(WTFMove(*origin), name->stringView.toString(), WTFMove(*typeArguments)); |
| 571 | while (true) { |
| 572 | auto typeSuffixNonAbbreviated = backtrackingScope<Expected<TypeSuffixNonAbbreviated, Error>>([&]() { |
| 573 | return parseTypeSuffixNonAbbreviated(); |
| 574 | }); |
| 575 | if (!typeSuffixNonAbbreviated) |
| 576 | break; |
| 577 | // FIXME: The nesting here might be in the wrong order. |
| 578 | auto next = constructTypeFromSuffixNonAbbreviated(*typeSuffixNonAbbreviated, WTFMove(result)); |
| 579 | result = WTFMove(next); |
| 580 | } |
| 581 | |
| 582 | return WTFMove(result); |
| 583 | } |
| 584 | |
| 585 | auto Parser::parseType() -> Expected<UniqueRef<AST::UnnamedType>, Error> |
| 586 | { |
| 587 | { |
| 588 | auto type = backtrackingScope<Expected<UniqueRef<AST::UnnamedType>, Error>>([&]() { |
| 589 | return parseAddressSpaceType(); |
| 590 | }); |
| 591 | if (type) |
| 592 | return type; |
| 593 | } |
| 594 | |
| 595 | auto type = backtrackingScope<Expected<UniqueRef<AST::UnnamedType>, Error>>([&]() { |
| 596 | return parseNonAddressSpaceType(); |
| 597 | }); |
| 598 | if (type) |
| 599 | return type; |
| 600 | |
| 601 | return Unexpected<Error>(type.error()); |
| 602 | } |
| 603 | |
| 604 | auto Parser::parseTypeDefinition() -> Expected<AST::TypeDefinition, Error> |
| 605 | { |
| 606 | auto origin = consumeType(Lexer::Token::Type::Typedef); |
| 607 | if (!origin) |
| 608 | return Unexpected<Error>(origin.error()); |
| 609 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 610 | if (!name) |
| 611 | return Unexpected<Error>(name.error()); |
| 612 | auto equals = consumeType(Lexer::Token::Type::EqualsSign); |
| 613 | if (!equals) |
| 614 | return Unexpected<Error>(equals.error()); |
| 615 | auto type = parseType(); |
| 616 | if (!type) |
| 617 | return Unexpected<Error>(type.error()); |
| 618 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 619 | if (!semicolon) |
| 620 | return Unexpected<Error>(semicolon.error()); |
| 621 | return AST::TypeDefinition(WTFMove(*origin), name->stringView.toString(), WTFMove(*type)); |
| 622 | } |
| 623 | |
| 624 | auto Parser::parseBuiltInSemantic() -> Expected<AST::BuiltInSemantic, Error> |
| 625 | { |
| 626 | auto origin = consumeTypes({ |
| 627 | Lexer::Token::Type::SVInstanceID, |
| 628 | Lexer::Token::Type::SVVertexID, |
| 629 | Lexer::Token::Type::PSize, |
| 630 | Lexer::Token::Type::SVPosition, |
| 631 | Lexer::Token::Type::SVIsFrontFace, |
| 632 | Lexer::Token::Type::SVSampleIndex, |
| 633 | Lexer::Token::Type::SVInnerCoverage, |
| 634 | Lexer::Token::Type::SVTarget, |
| 635 | Lexer::Token::Type::SVDepth, |
| 636 | Lexer::Token::Type::SVCoverage, |
| 637 | Lexer::Token::Type::SVDispatchThreadID, |
| 638 | Lexer::Token::Type::SVGroupID, |
| 639 | Lexer::Token::Type::SVGroupIndex, |
| 640 | Lexer::Token::Type::SVGroupThreadID}); |
| 641 | if (!origin) |
| 642 | return Unexpected<Error>(origin.error()); |
| 643 | |
| 644 | switch (origin->type) { |
| 645 | case Lexer::Token::Type::SVInstanceID: |
| 646 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVInstanceID); |
| 647 | case Lexer::Token::Type::SVVertexID: |
| 648 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVVertexID); |
| 649 | case Lexer::Token::Type::PSize: |
| 650 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::PSize); |
| 651 | case Lexer::Token::Type::SVPosition: |
| 652 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVPosition); |
| 653 | case Lexer::Token::Type::SVIsFrontFace: |
| 654 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVIsFrontFace); |
| 655 | case Lexer::Token::Type::SVSampleIndex: |
| 656 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVSampleIndex); |
| 657 | case Lexer::Token::Type::SVInnerCoverage: |
| 658 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVInnerCoverage); |
| 659 | case Lexer::Token::Type::SVTarget: { |
| 660 | auto target = consumeNonNegativeIntegralLiteral(); // FIXME: https://bugs.webkit.org/show_bug.cgi?id=195807 Make this work with strings like "SV_Target0". |
| 661 | if (!target) |
| 662 | return Unexpected<Error>(target.error()); |
| 663 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVTarget, *target); |
| 664 | } |
| 665 | case Lexer::Token::Type::SVDepth: |
| 666 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVDepth); |
| 667 | case Lexer::Token::Type::SVCoverage: |
| 668 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVCoverage); |
| 669 | case Lexer::Token::Type::SVDispatchThreadID: |
| 670 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVDispatchThreadID); |
| 671 | case Lexer::Token::Type::SVGroupID: |
| 672 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVGroupID); |
| 673 | case Lexer::Token::Type::SVGroupIndex: |
| 674 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVGroupIndex); |
| 675 | default: |
| 676 | ASSERT(origin->type == Lexer::Token::Type::SVGroupThreadID); |
| 677 | return AST::BuiltInSemantic(WTFMove(*origin), AST::BuiltInSemantic::Variable::SVGroupThreadID); |
| 678 | } |
| 679 | } |
| 680 | |
| 681 | auto Parser::parseResourceSemantic() -> Expected<AST::ResourceSemantic, Error> |
| 682 | { |
| 683 | auto origin = consumeType(Lexer::Token::Type::Register); |
| 684 | if (!origin) |
| 685 | return Unexpected<Error>(origin.error()); |
| 686 | |
| 687 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 688 | if (!leftParenthesis) |
| 689 | return Unexpected<Error>(leftParenthesis.error()); |
| 690 | |
| 691 | auto info = consumeType(Lexer::Token::Type::Identifier); |
| 692 | if (!info) |
| 693 | return Unexpected<Error>(info.error()); |
| 694 | if (info->stringView.length() < 2 || (info->stringView[0] != 'u' |
| 695 | && info->stringView[0] != 't' |
| 696 | && info->stringView[0] != 'b' |
| 697 | && info->stringView[0] != 's')) |
| 698 | return Unexpected<Error>(Error(makeString(info->stringView.substring(0, 1), " is not a known resource type ('u', 't', 'b', or 's')" ))); |
| 699 | |
| 700 | AST::ResourceSemantic::Mode mode; |
| 701 | switch (info->stringView[0]) { |
| 702 | case 'u': |
| 703 | mode = AST::ResourceSemantic::Mode::UnorderedAccessView; |
| 704 | break; |
| 705 | case 't': |
| 706 | mode = AST::ResourceSemantic::Mode::Texture; |
| 707 | break; |
| 708 | case 'b': |
| 709 | mode = AST::ResourceSemantic::Mode::Buffer; |
| 710 | break; |
| 711 | case 's': |
| 712 | mode = AST::ResourceSemantic::Mode::Sampler; |
| 713 | break; |
| 714 | } |
| 715 | |
| 716 | auto index = recognizeSimpleUnsignedInteger(info->stringView.substring(1)); |
| 717 | if (!index) |
| 718 | return Unexpected<Error>(index.error()); |
| 719 | |
| 720 | unsigned space = 0; |
| 721 | if (tryType(Lexer::Token::Type::Comma)) { |
| 722 | auto spaceToken = consumeType(Lexer::Token::Type::Identifier); |
| 723 | if (!spaceToken) |
| 724 | return Unexpected<Error>(spaceToken.error()); |
| 725 | auto prefix = "space"_str ; |
| 726 | if (!spaceToken->stringView.startsWith(StringView(prefix))) |
| 727 | return Unexpected<Error>(Error(makeString("Second argument to resource semantic " , spaceToken->stringView, " needs be of the form 'space0'" ))); |
| 728 | if (spaceToken->stringView.length() <= prefix.length()) |
| 729 | return Unexpected<Error>(Error(makeString("Second argument to resource semantic " , spaceToken->stringView, " needs be of the form 'space0'" ))); |
| 730 | auto spaceValue = recognizeSimpleUnsignedInteger(spaceToken->stringView.substring(prefix.length())); |
| 731 | if (!spaceValue) |
| 732 | return Unexpected<Error>(spaceValue.error()); |
| 733 | space = *spaceValue; |
| 734 | } |
| 735 | |
| 736 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 737 | if (!rightParenthesis) |
| 738 | return Unexpected<Error>(rightParenthesis.error()); |
| 739 | |
| 740 | return AST::ResourceSemantic(WTFMove(*origin), mode, *index, space); |
| 741 | } |
| 742 | |
| 743 | auto Parser::parseSpecializationConstantSemantic() -> Expected<AST::SpecializationConstantSemantic, Error> |
| 744 | { |
| 745 | auto origin = consumeType(Lexer::Token::Type::Specialized); |
| 746 | if (!origin) |
| 747 | return Unexpected<Error>(origin.error()); |
| 748 | return AST::SpecializationConstantSemantic(WTFMove(*origin)); |
| 749 | } |
| 750 | |
| 751 | auto Parser::parseStageInOutSemantic() -> Expected<AST::StageInOutSemantic, Error> |
| 752 | { |
| 753 | auto origin = consumeType(Lexer::Token::Type::Attribute); |
| 754 | if (!origin) |
| 755 | return Unexpected<Error>(origin.error()); |
| 756 | |
| 757 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 758 | if (!leftParenthesis) |
| 759 | return Unexpected<Error>(leftParenthesis.error()); |
| 760 | |
| 761 | auto index = consumeNonNegativeIntegralLiteral(); |
| 762 | if (!index) |
| 763 | return Unexpected<Error>(index.error()); |
| 764 | |
| 765 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 766 | if (!rightParenthesis) |
| 767 | return Unexpected<Error>(rightParenthesis.error()); |
| 768 | |
| 769 | return AST::StageInOutSemantic(WTFMove(*origin), *index); |
| 770 | } |
| 771 | |
| 772 | auto Parser::parseSemantic() -> Expected<AST::Semantic, Error> |
| 773 | { |
| 774 | auto builtInSemantic = backtrackingScope<Expected<AST::BuiltInSemantic, Error>>([&]() { |
| 775 | return parseBuiltInSemantic(); |
| 776 | }); |
| 777 | if (builtInSemantic) |
| 778 | return AST::Semantic(WTFMove(*builtInSemantic)); |
| 779 | |
| 780 | auto resourceSemantic = backtrackingScope<Expected<AST::ResourceSemantic, Error>>([&]() { |
| 781 | return parseResourceSemantic(); |
| 782 | }); |
| 783 | if (resourceSemantic) |
| 784 | return AST::Semantic(WTFMove(*resourceSemantic)); |
| 785 | |
| 786 | auto specializationConstantSemantic = backtrackingScope<Expected<AST::SpecializationConstantSemantic, Error>>([&]() { |
| 787 | return parseSpecializationConstantSemantic(); |
| 788 | }); |
| 789 | if (specializationConstantSemantic) |
| 790 | return AST::Semantic(WTFMove(*specializationConstantSemantic)); |
| 791 | |
| 792 | auto stageInOutSemantic = backtrackingScope<Expected<AST::StageInOutSemantic, Error>>([&]() { |
| 793 | return parseStageInOutSemantic(); |
| 794 | }); |
| 795 | if (stageInOutSemantic) |
| 796 | return AST::Semantic(WTFMove(*stageInOutSemantic)); |
| 797 | |
| 798 | return Unexpected<Error>(stageInOutSemantic.error()); |
| 799 | } |
| 800 | AST::Qualifiers Parser::parseQualifiers() |
| 801 | { |
| 802 | AST::Qualifiers qualifiers; |
| 803 | while (true) { |
| 804 | if (auto next = tryType(Lexer::Token::Type::Qualifier)) { |
| 805 | if ("nointerpolation" == next->stringView) |
| 806 | qualifiers.append(AST::Qualifier::Nointerpolation); |
| 807 | else if ("noperspective" == next->stringView) |
| 808 | qualifiers.append(AST::Qualifier::Noperspective); |
| 809 | else if ("uniform" == next->stringView) |
| 810 | qualifiers.append(AST::Qualifier::Uniform); |
| 811 | else if ("centroid" == next->stringView) |
| 812 | qualifiers.append(AST::Qualifier::Centroid); |
| 813 | else { |
| 814 | ASSERT("sample" == next->stringView); |
| 815 | qualifiers.append(AST::Qualifier::Sample); |
| 816 | } |
| 817 | } else |
| 818 | break; |
| 819 | } |
| 820 | return qualifiers; |
| 821 | } |
| 822 | |
| 823 | auto Parser::parseStructureElement() -> Expected<AST::StructureElement, Error> |
| 824 | { |
| 825 | auto origin = peek(); |
| 826 | if (!origin) |
| 827 | return Unexpected<Error>(origin.error()); |
| 828 | |
| 829 | AST::Qualifiers qualifiers = parseQualifiers(); |
| 830 | |
| 831 | auto type = parseType(); |
| 832 | if (!type) |
| 833 | return Unexpected<Error>(type.error()); |
| 834 | |
| 835 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 836 | if (!name) |
| 837 | return Unexpected<Error>(name.error()); |
| 838 | |
| 839 | if (tryType(Lexer::Token::Type::Colon)) { |
| 840 | auto semantic = parseSemantic(); |
| 841 | if (!semantic) |
| 842 | return Unexpected<Error>(semantic.error()); |
| 843 | |
| 844 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 845 | if (!semicolon) |
| 846 | return Unexpected<Error>(semicolon.error()); |
| 847 | |
| 848 | return AST::StructureElement(WTFMove(*origin), WTFMove(qualifiers), WTFMove(*type), name->stringView.toString(), WTFMove(*semantic)); |
| 849 | } |
| 850 | |
| 851 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 852 | if (!semicolon) |
| 853 | return Unexpected<Error>(semicolon.error()); |
| 854 | |
| 855 | return AST::StructureElement(WTFMove(*origin), WTFMove(qualifiers), WTFMove(*type), name->stringView.toString(), WTF::nullopt); |
| 856 | } |
| 857 | |
| 858 | auto Parser::parseStructureDefinition() -> Expected<AST::StructureDefinition, Error> |
| 859 | { |
| 860 | auto origin = consumeType(Lexer::Token::Type::Struct); |
| 861 | if (!origin) |
| 862 | return Unexpected<Error>(origin.error()); |
| 863 | |
| 864 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 865 | if (!name) |
| 866 | return Unexpected<Error>(name.error()); |
| 867 | |
| 868 | auto leftCurlyBracket = consumeType(Lexer::Token::Type::LeftCurlyBracket); |
| 869 | if (!leftCurlyBracket) |
| 870 | return Unexpected<Error>(leftCurlyBracket.error()); |
| 871 | |
| 872 | AST::StructureElements structureElements; |
| 873 | while (true) { |
| 874 | auto structureElement = backtrackingScope<Expected<AST::StructureElement, Error>>([&]() { |
| 875 | return parseStructureElement(); |
| 876 | }); |
| 877 | if (structureElement) |
| 878 | structureElements.append(WTFMove(*structureElement)); |
| 879 | else |
| 880 | break; |
| 881 | } |
| 882 | |
| 883 | auto rightCurlyBracket = consumeType(Lexer::Token::Type::RightCurlyBracket); |
| 884 | if (!rightCurlyBracket) |
| 885 | return Unexpected<Error>(rightCurlyBracket.error()); |
| 886 | |
| 887 | return AST::StructureDefinition(WTFMove(*origin), name->stringView.toString(), WTFMove(structureElements)); |
| 888 | } |
| 889 | |
| 890 | auto Parser::parseEnumerationDefinition() -> Expected<AST::EnumerationDefinition, Error> |
| 891 | { |
| 892 | auto origin = consumeType(Lexer::Token::Type::Enum); |
| 893 | if (!origin) |
| 894 | return Unexpected<Error>(origin.error()); |
| 895 | |
| 896 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 897 | if (!name) |
| 898 | return Unexpected<Error>(name.error()); |
| 899 | |
| 900 | auto type = ([&]() -> Expected<UniqueRef<AST::UnnamedType>, Error> { |
| 901 | if (tryType(Lexer::Token::Type::Colon)) { |
| 902 | auto parsedType = parseType(); |
| 903 | if (!parsedType) |
| 904 | return Unexpected<Error>(parsedType.error()); |
| 905 | return WTFMove(*parsedType); |
| 906 | } |
| 907 | return { makeUniqueRef<AST::TypeReference>(Lexer::Token(*origin), "int"_str , AST::TypeArguments()) }; |
| 908 | })(); |
| 909 | if (!type) |
| 910 | return Unexpected<Error>(type.error()); |
| 911 | |
| 912 | auto leftCurlyBracket = consumeType(Lexer::Token::Type::LeftCurlyBracket); |
| 913 | if (!leftCurlyBracket) |
| 914 | return Unexpected<Error>(leftCurlyBracket.error()); |
| 915 | |
| 916 | auto firstEnumerationMember = parseEnumerationMember(); |
| 917 | if (!firstEnumerationMember) |
| 918 | return Unexpected<Error>(firstEnumerationMember.error()); |
| 919 | |
| 920 | AST::EnumerationDefinition result(WTFMove(*origin), name->stringView.toString(), WTFMove(*type)); |
| 921 | auto success = result.add(WTFMove(*firstEnumerationMember)); |
| 922 | if (!success) |
| 923 | return fail("Cannot add enumeration member"_str ); |
| 924 | |
| 925 | while (tryType(Lexer::Token::Type::Comma)) { |
| 926 | auto member = parseEnumerationMember(); |
| 927 | if (!member) |
| 928 | return Unexpected<Error>(member.error()); |
| 929 | success = result.add(WTFMove(*member)); |
| 930 | if (!success) |
| 931 | return fail("Cannot add enumeration member"_str ); |
| 932 | } |
| 933 | |
| 934 | auto rightCurlyBracket = consumeType(Lexer::Token::Type::RightCurlyBracket); |
| 935 | if (!rightCurlyBracket) |
| 936 | return Unexpected<Error>(rightCurlyBracket.error()); |
| 937 | |
| 938 | return WTFMove(result); |
| 939 | } |
| 940 | |
| 941 | auto Parser::parseEnumerationMember() -> Expected<AST::EnumerationMember, Error> |
| 942 | { |
| 943 | auto identifier = consumeType(Lexer::Token::Type::Identifier); |
| 944 | if (!identifier) |
| 945 | return Unexpected<Error>(identifier.error()); |
| 946 | auto name = identifier->stringView.toString(); |
| 947 | |
| 948 | if (tryType(Lexer::Token::Type::EqualsSign)) { |
| 949 | auto constantExpression = parseConstantExpression(); |
| 950 | if (!constantExpression) |
| 951 | return Unexpected<Error>(constantExpression.error()); |
| 952 | return AST::EnumerationMember(Lexer::Token(*identifier), WTFMove(name), WTFMove(*constantExpression)); |
| 953 | } |
| 954 | return AST::EnumerationMember(Lexer::Token(*identifier), WTFMove(name)); |
| 955 | } |
| 956 | |
| 957 | auto Parser::parseNativeTypeDeclaration() -> Expected<AST::NativeTypeDeclaration, Error> |
| 958 | { |
| 959 | auto origin = consumeType(Lexer::Token::Type::Native); |
| 960 | if (!origin) |
| 961 | return Unexpected<Error>(origin.error()); |
| 962 | |
| 963 | auto parsedTypedef = consumeType(Lexer::Token::Type::Typedef); |
| 964 | if (!parsedTypedef) |
| 965 | return Unexpected<Error>(parsedTypedef.error()); |
| 966 | |
| 967 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 968 | if (!name) |
| 969 | return Unexpected<Error>(name.error()); |
| 970 | |
| 971 | auto typeArguments = parseTypeArguments(); |
| 972 | if (!typeArguments) |
| 973 | return Unexpected<Error>(typeArguments.error()); |
| 974 | |
| 975 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 976 | if (!semicolon) |
| 977 | return Unexpected<Error>(semicolon.error()); |
| 978 | |
| 979 | return AST::NativeTypeDeclaration(WTFMove(*origin), name->stringView.toString(), WTFMove(*typeArguments)); |
| 980 | } |
| 981 | |
| 982 | auto Parser::parseNumThreadsFunctionAttribute() -> Expected<AST::NumThreadsFunctionAttribute, Error> |
| 983 | { |
| 984 | auto origin = consumeType(Lexer::Token::Type::NumThreads); |
| 985 | if (!origin) |
| 986 | return Unexpected<Error>(origin.error()); |
| 987 | |
| 988 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 989 | if (!leftParenthesis) |
| 990 | return Unexpected<Error>(leftParenthesis.error()); |
| 991 | |
| 992 | auto width = consumeNonNegativeIntegralLiteral(); |
| 993 | if (!width) |
| 994 | return Unexpected<Error>(width.error()); |
| 995 | |
| 996 | auto comma = consumeType(Lexer::Token::Type::Comma); |
| 997 | if (!comma) |
| 998 | return Unexpected<Error>(comma.error()); |
| 999 | |
| 1000 | auto height = consumeNonNegativeIntegralLiteral(); |
| 1001 | if (!height) |
| 1002 | return Unexpected<Error>(height.error()); |
| 1003 | |
| 1004 | comma = consumeType(Lexer::Token::Type::Comma); |
| 1005 | if (!comma) |
| 1006 | return Unexpected<Error>(comma.error()); |
| 1007 | |
| 1008 | auto depth = consumeNonNegativeIntegralLiteral(); |
| 1009 | if (!depth) |
| 1010 | return Unexpected<Error>(depth.error()); |
| 1011 | |
| 1012 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1013 | if (!rightParenthesis) |
| 1014 | return Unexpected<Error>(rightParenthesis.error()); |
| 1015 | |
| 1016 | return AST::NumThreadsFunctionAttribute(WTFMove(*origin), *width, *height, *depth); |
| 1017 | } |
| 1018 | |
| 1019 | auto Parser::parseAttributeBlock() -> Expected<AST::AttributeBlock, Error> |
| 1020 | { |
| 1021 | auto leftSquareBracket = consumeType(Lexer::Token::Type::LeftSquareBracket); |
| 1022 | if (!leftSquareBracket) |
| 1023 | return Unexpected<Error>(leftSquareBracket.error()); |
| 1024 | |
| 1025 | AST::AttributeBlock result; |
| 1026 | |
| 1027 | while (true) { |
| 1028 | auto numThreadsFunctionAttribute = backtrackingScope<Expected<AST::NumThreadsFunctionAttribute, Error>>([&]() { |
| 1029 | return parseNumThreadsFunctionAttribute(); |
| 1030 | }); |
| 1031 | if (numThreadsFunctionAttribute) { |
| 1032 | result.append(WTFMove(*numThreadsFunctionAttribute)); |
| 1033 | continue; |
| 1034 | } |
| 1035 | |
| 1036 | break; |
| 1037 | } |
| 1038 | |
| 1039 | auto rightSquareBracket = consumeType(Lexer::Token::Type::RightSquareBracket); |
| 1040 | if (!rightSquareBracket) |
| 1041 | return Unexpected<Error>(rightSquareBracket.error()); |
| 1042 | |
| 1043 | return WTFMove(result); |
| 1044 | } |
| 1045 | |
| 1046 | auto Parser::parseParameter() -> Expected<AST::VariableDeclaration, Error> |
| 1047 | { |
| 1048 | auto origin = peek(); |
| 1049 | if (!origin) |
| 1050 | return Unexpected<Error>(origin.error()); |
| 1051 | |
| 1052 | AST::Qualifiers qualifiers = parseQualifiers(); |
| 1053 | |
| 1054 | auto type = parseType(); |
| 1055 | if (!type) |
| 1056 | return Unexpected<Error>(type.error()); |
| 1057 | |
| 1058 | String name; |
| 1059 | if (auto token = tryType(Lexer::Token::Type::Identifier)) |
| 1060 | name = token->stringView.toString(); |
| 1061 | |
| 1062 | if (tryType(Lexer::Token::Type::Colon)) { |
| 1063 | auto semantic = parseSemantic(); |
| 1064 | if (!semantic) |
| 1065 | return Unexpected<Error>(semantic.error()); |
| 1066 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), Optional<UniqueRef<AST::UnnamedType>>(WTFMove(*type)), WTFMove(name), WTFMove(*semantic), WTF::nullopt); |
| 1067 | } |
| 1068 | |
| 1069 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), { WTFMove(*type) }, WTFMove(name), WTF::nullopt, WTF::nullopt); |
| 1070 | } |
| 1071 | |
| 1072 | auto Parser::parseParameters() -> Expected<AST::VariableDeclarations, Error> |
| 1073 | { |
| 1074 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1075 | if (!leftParenthesis) |
| 1076 | return Unexpected<Error>(leftParenthesis.error()); |
| 1077 | |
| 1078 | AST::VariableDeclarations parameters; |
| 1079 | if (tryType(Lexer::Token::Type::RightParenthesis)) |
| 1080 | return WTFMove(parameters); |
| 1081 | |
| 1082 | auto firstParameter = parseParameter(); |
| 1083 | if (!firstParameter) |
| 1084 | return Unexpected<Error>(firstParameter.error()); |
| 1085 | parameters.append(WTFMove(*firstParameter)); |
| 1086 | |
| 1087 | while (tryType(Lexer::Token::Type::Comma)) { |
| 1088 | auto parameter = parseParameter(); |
| 1089 | if (!parameter) |
| 1090 | return Unexpected<Error>(parameter.error()); |
| 1091 | parameters.append(WTFMove(*parameter)); |
| 1092 | } |
| 1093 | |
| 1094 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1095 | if (!rightParenthesis) |
| 1096 | return Unexpected<Error>(rightParenthesis.error()); |
| 1097 | |
| 1098 | return WTFMove(parameters); |
| 1099 | } |
| 1100 | |
| 1101 | auto Parser::parseFunctionDefinition() -> Expected<AST::FunctionDefinition, Error> |
| 1102 | { |
| 1103 | auto functionDeclaration = parseFunctionDeclaration(); |
| 1104 | if (!functionDeclaration) |
| 1105 | return Unexpected<Error>(functionDeclaration.error()); |
| 1106 | |
| 1107 | auto block = parseBlock(); |
| 1108 | if (!block) |
| 1109 | return Unexpected<Error>(block.error()); |
| 1110 | |
| 1111 | return AST::FunctionDefinition(WTFMove(*functionDeclaration), WTFMove(*block)); |
| 1112 | } |
| 1113 | |
| 1114 | auto Parser::parseEntryPointFunctionDeclaration() -> Expected<AST::FunctionDeclaration, Error> |
| 1115 | { |
| 1116 | auto origin = peek(); |
| 1117 | if (!origin) |
| 1118 | return Unexpected<Error>(origin.error()); |
| 1119 | |
| 1120 | AST::AttributeBlock attributeBlock; |
| 1121 | AST::EntryPointType entryPointType; |
| 1122 | |
| 1123 | auto parsedAttributeBlock = backtrackingScope<Expected<AST::AttributeBlock, Error>>([&]() { |
| 1124 | return parseAttributeBlock(); |
| 1125 | }); |
| 1126 | if (parsedAttributeBlock) { |
| 1127 | auto compute = consumeType(Lexer::Token::Type::Compute); |
| 1128 | if (!compute) |
| 1129 | return Unexpected<Error>(compute.error()); |
| 1130 | attributeBlock = WTFMove(*parsedAttributeBlock); |
| 1131 | entryPointType = AST::EntryPointType::Compute; |
| 1132 | } else { |
| 1133 | auto type = consumeTypes({ Lexer::Token::Type::Vertex, Lexer::Token::Type::Fragment }); |
| 1134 | if (!type) |
| 1135 | return Unexpected<Error>(type.error()); |
| 1136 | |
| 1137 | switch (origin->type) { |
| 1138 | case Lexer::Token::Type::Vertex: |
| 1139 | entryPointType = AST::EntryPointType::Vertex; |
| 1140 | break; |
| 1141 | default: |
| 1142 | ASSERT(origin->type == Lexer::Token::Type::Fragment); |
| 1143 | entryPointType = AST::EntryPointType::Fragment; |
| 1144 | break; |
| 1145 | } |
| 1146 | } |
| 1147 | |
| 1148 | auto type = parseType(); |
| 1149 | if (!type) |
| 1150 | return Unexpected<Error>(type.error()); |
| 1151 | |
| 1152 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 1153 | if (!name) |
| 1154 | return Unexpected<Error>(name.error()); |
| 1155 | |
| 1156 | auto parameters = parseParameters(); |
| 1157 | if (!parameters) |
| 1158 | return Unexpected<Error>(parameters.error()); |
| 1159 | |
| 1160 | bool isOperator = false; |
| 1161 | |
| 1162 | if (tryType(Lexer::Token::Type::Colon)) { |
| 1163 | auto semantic = parseSemantic(); |
| 1164 | if (!semantic) |
| 1165 | return Unexpected<Error>(semantic.error()); |
| 1166 | return AST::FunctionDeclaration(WTFMove(*origin), WTFMove(attributeBlock), entryPointType, WTFMove(*type), name->stringView.toString(), WTFMove(*parameters), WTFMove(*semantic), isOperator); |
| 1167 | } |
| 1168 | |
| 1169 | return AST::FunctionDeclaration(WTFMove(*origin), WTFMove(attributeBlock), entryPointType, WTFMove(*type), name->stringView.toString(), WTFMove(*parameters), WTF::nullopt, isOperator); |
| 1170 | } |
| 1171 | |
| 1172 | auto Parser::parseRegularFunctionDeclaration() -> Expected<AST::FunctionDeclaration, Error> |
| 1173 | { |
| 1174 | auto origin = peek(); |
| 1175 | if (!origin) |
| 1176 | return Unexpected<Error>(origin.error()); |
| 1177 | |
| 1178 | auto type = parseType(); |
| 1179 | if (!type) |
| 1180 | return Unexpected<Error>(type.error()); |
| 1181 | |
| 1182 | auto name = consumeTypes({ Lexer::Token::Type::Identifier, Lexer::Token::Type::OperatorName }); |
| 1183 | if (!name) |
| 1184 | return Unexpected<Error>(name.error()); |
| 1185 | auto isOperator = name->type == Lexer::Token::Type::OperatorName; |
| 1186 | |
| 1187 | auto parameters = parseParameters(); |
| 1188 | if (!parameters) |
| 1189 | return Unexpected<Error>(parameters.error()); |
| 1190 | |
| 1191 | if (tryType(Lexer::Token::Type::Colon)) { |
| 1192 | auto semantic = parseSemantic(); |
| 1193 | if (!semantic) |
| 1194 | return Unexpected<Error>(semantic.error()); |
| 1195 | return AST::FunctionDeclaration(WTFMove(*origin), { }, WTF::nullopt, WTFMove(*type), name->stringView.toString(), WTFMove(*parameters), WTFMove(*semantic), isOperator); |
| 1196 | } |
| 1197 | |
| 1198 | return AST::FunctionDeclaration(WTFMove(*origin), { }, WTF::nullopt, WTFMove(*type), name->stringView.toString(), WTFMove(*parameters), WTF::nullopt, isOperator); |
| 1199 | } |
| 1200 | |
| 1201 | auto Parser::parseOperatorFunctionDeclaration() -> Expected<AST::FunctionDeclaration, Error> |
| 1202 | { |
| 1203 | auto origin = consumeType(Lexer::Token::Type::Operator); |
| 1204 | if (!origin) |
| 1205 | return Unexpected<Error>(origin.error()); |
| 1206 | |
| 1207 | auto type = parseType(); |
| 1208 | if (!type) |
| 1209 | return Unexpected<Error>(type.error()); |
| 1210 | |
| 1211 | auto parameters = parseParameters(); |
| 1212 | if (!parameters) |
| 1213 | return Unexpected<Error>(parameters.error()); |
| 1214 | |
| 1215 | bool isOperator = true; |
| 1216 | |
| 1217 | if (tryType(Lexer::Token::Type::Colon)) { |
| 1218 | auto semantic = parseSemantic(); |
| 1219 | if (!semantic) |
| 1220 | return Unexpected<Error>(semantic.error()); |
| 1221 | return AST::FunctionDeclaration(WTFMove(*origin), { }, WTF::nullopt, WTFMove(*type), "operator cast"_str , WTFMove(*parameters), WTFMove(*semantic), isOperator); |
| 1222 | } |
| 1223 | |
| 1224 | return AST::FunctionDeclaration(WTFMove(*origin), { }, WTF::nullopt, WTFMove(*type), "operator cast"_str , WTFMove(*parameters), WTF::nullopt, isOperator); |
| 1225 | } |
| 1226 | |
| 1227 | auto Parser::parseFunctionDeclaration() -> Expected<AST::FunctionDeclaration, Error> |
| 1228 | { |
| 1229 | auto entryPointFunctionDeclaration = backtrackingScope<Expected<AST::FunctionDeclaration, Error>>([&]() { |
| 1230 | return parseEntryPointFunctionDeclaration(); |
| 1231 | }); |
| 1232 | if (entryPointFunctionDeclaration) |
| 1233 | return WTFMove(*entryPointFunctionDeclaration); |
| 1234 | |
| 1235 | auto regularFunctionDeclaration = backtrackingScope<Expected<AST::FunctionDeclaration, Error>>([&]() { |
| 1236 | return parseRegularFunctionDeclaration(); |
| 1237 | }); |
| 1238 | if (regularFunctionDeclaration) |
| 1239 | return WTFMove(*regularFunctionDeclaration); |
| 1240 | |
| 1241 | auto operatorFunctionDeclaration = backtrackingScope<Expected<AST::FunctionDeclaration, Error>>([&]() { |
| 1242 | return parseOperatorFunctionDeclaration(); |
| 1243 | }); |
| 1244 | if (operatorFunctionDeclaration) |
| 1245 | return WTFMove(*operatorFunctionDeclaration); |
| 1246 | |
| 1247 | return Unexpected<Error>(operatorFunctionDeclaration.error()); |
| 1248 | } |
| 1249 | |
| 1250 | auto Parser::parseNativeFunctionDeclaration() -> Expected<AST::NativeFunctionDeclaration, Error> |
| 1251 | { |
| 1252 | Optional<Lexer::Token> origin; |
| 1253 | |
| 1254 | auto native = consumeType(Lexer::Token::Type::Native); |
| 1255 | if (!native) |
| 1256 | return Unexpected<Error>(native.error()); |
| 1257 | if (!origin) |
| 1258 | origin = *native; |
| 1259 | |
| 1260 | auto functionDeclaration = parseFunctionDeclaration(); |
| 1261 | if (!functionDeclaration) |
| 1262 | return Unexpected<Error>(functionDeclaration.error()); |
| 1263 | |
| 1264 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1265 | if (!semicolon) |
| 1266 | return Unexpected<Error>(semicolon.error()); |
| 1267 | |
| 1268 | return AST::NativeFunctionDeclaration(WTFMove(*functionDeclaration)); |
| 1269 | } |
| 1270 | |
| 1271 | auto Parser::parseBlock() -> Expected<AST::Block, Error> |
| 1272 | { |
| 1273 | auto origin = consumeType(Lexer::Token::Type::LeftCurlyBracket); |
| 1274 | if (!origin) |
| 1275 | return Unexpected<Error>(origin.error()); |
| 1276 | |
| 1277 | auto result = parseBlockBody(WTFMove(*origin)); |
| 1278 | |
| 1279 | auto rightCurlyBracket = consumeType(Lexer::Token::Type::RightCurlyBracket); |
| 1280 | if (!rightCurlyBracket) |
| 1281 | return Unexpected<Error>(rightCurlyBracket.error()); |
| 1282 | |
| 1283 | return WTFMove(result); |
| 1284 | } |
| 1285 | |
| 1286 | AST::Block Parser::parseBlockBody(Lexer::Token&& origin) |
| 1287 | { |
| 1288 | AST::Statements statements; |
| 1289 | while (true) { |
| 1290 | auto statement = backtrackingScope<Expected<UniqueRef<AST::Statement>, Error>>([&]() { |
| 1291 | return parseStatement(); |
| 1292 | }); |
| 1293 | if (statement) |
| 1294 | statements.append(WTFMove(*statement)); |
| 1295 | else |
| 1296 | break; |
| 1297 | } |
| 1298 | return AST::Block(WTFMove(origin), WTFMove(statements)); |
| 1299 | } |
| 1300 | |
| 1301 | auto Parser::parseIfStatement() -> Expected<AST::IfStatement, Error> |
| 1302 | { |
| 1303 | auto origin = consumeType(Lexer::Token::Type::If); |
| 1304 | if (!origin) |
| 1305 | return Unexpected<Error>(origin.error()); |
| 1306 | |
| 1307 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1308 | if (!leftParenthesis) |
| 1309 | return Unexpected<Error>(leftParenthesis.error()); |
| 1310 | |
| 1311 | auto conditional = parseExpression(); |
| 1312 | if (!conditional) |
| 1313 | return Unexpected<Error>(conditional.error()); |
| 1314 | |
| 1315 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1316 | if (!rightParenthesis) |
| 1317 | return Unexpected<Error>(rightParenthesis.error()); |
| 1318 | |
| 1319 | auto body = parseStatement(); |
| 1320 | if (!body) |
| 1321 | return Unexpected<Error>(body.error()); |
| 1322 | |
| 1323 | Optional<UniqueRef<AST::Statement>> elseBody; |
| 1324 | if (tryType(Lexer::Token::Type::Else)) { |
| 1325 | auto parsedElseBody = parseStatement(); |
| 1326 | if (!parsedElseBody) |
| 1327 | return Unexpected<Error>(parsedElseBody.error()); |
| 1328 | elseBody = WTFMove(*parsedElseBody); |
| 1329 | } |
| 1330 | |
| 1331 | Vector<UniqueRef<AST::Expression>> castArguments; |
| 1332 | castArguments.append(WTFMove(*conditional)); |
| 1333 | auto boolCast = makeUniqueRef<AST::CallExpression>(Lexer::Token(*origin), "bool"_str , WTFMove(castArguments)); |
| 1334 | return AST::IfStatement(WTFMove(*origin), WTFMove(boolCast), WTFMove(*body), WTFMove(elseBody)); |
| 1335 | } |
| 1336 | |
| 1337 | auto Parser::parseSwitchStatement() -> Expected<AST::SwitchStatement, Error> |
| 1338 | { |
| 1339 | auto origin = consumeType(Lexer::Token::Type::Switch); |
| 1340 | if (!origin) |
| 1341 | return Unexpected<Error>(origin.error()); |
| 1342 | |
| 1343 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1344 | if (!leftParenthesis) |
| 1345 | return Unexpected<Error>(leftParenthesis.error()); |
| 1346 | |
| 1347 | auto value = parseExpression(); |
| 1348 | if (!value) |
| 1349 | return Unexpected<Error>(value.error()); |
| 1350 | |
| 1351 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1352 | if (!rightParenthesis) |
| 1353 | return Unexpected<Error>(rightParenthesis.error()); |
| 1354 | |
| 1355 | auto leftCurlyBracket = consumeType(Lexer::Token::Type::LeftCurlyBracket); |
| 1356 | if (!leftCurlyBracket) |
| 1357 | return Unexpected<Error>(leftCurlyBracket.error()); |
| 1358 | |
| 1359 | Vector<AST::SwitchCase> switchCases; |
| 1360 | while (true) { |
| 1361 | auto switchCase = backtrackingScope<Expected<AST::SwitchCase, Error>>([&]() { |
| 1362 | return parseSwitchCase(); |
| 1363 | }); |
| 1364 | if (switchCase) |
| 1365 | switchCases.append(WTFMove(*switchCase)); |
| 1366 | else |
| 1367 | break; |
| 1368 | } |
| 1369 | |
| 1370 | auto rightCurlyBracket = consumeType(Lexer::Token::Type::RightCurlyBracket); |
| 1371 | if (!rightCurlyBracket) |
| 1372 | return Unexpected<Error>(rightCurlyBracket.error()); |
| 1373 | |
| 1374 | return AST::SwitchStatement(WTFMove(*origin), WTFMove(*value), WTFMove(switchCases)); |
| 1375 | } |
| 1376 | |
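| | // Parses either "case <constant expression>:" or "default:", followed by a block body.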
| 1377 | auto Parser::parseSwitchCase() -> Expected<AST::SwitchCase, Error> |
| 1378 | { |
| 1379 | auto origin = consumeTypes({ Lexer::Token::Type::Case, Lexer::Token::Type::Default }); |
| 1380 | if (!origin) |
| 1381 | return Unexpected<Error>(origin.error()); |
| 1382 | |
| 1383 | switch (origin->type) { |
| 1384 | case Lexer::Token::Type::Case: { |
| 1385 | auto value = parseConstantExpression(); |
| 1386 | if (!value) |
| 1387 | return Unexpected<Error>(value.error()); |
| 1388 | |
| 1389 | auto colon = consumeType(Lexer::Token::Type::Colon);
| 1390 | if (!colon)
| 1391 | return Unexpected<Error>(colon.error());
| 1392 | 
| 1393 | auto block = parseBlockBody(Lexer::Token(*colon));
| 1394 | 
| 1395 | return AST::SwitchCase(WTFMove(*colon), WTFMove(*value), WTFMove(block));
| 1396 | } |
| 1397 | default: { |
| 1398 | ASSERT(origin->type == Lexer::Token::Type::Default); |
| 1399 | auto colon = consumeType(Lexer::Token::Type::Colon);
| 1400 | if (!colon)
| 1401 | return Unexpected<Error>(colon.error());
| 1402 | 
| 1403 | auto block = parseBlockBody(Lexer::Token(*colon));
| 1404 | 
| 1405 | return AST::SwitchCase(WTFMove(*colon), WTF::nullopt, WTFMove(block));
| 1406 | } |
| 1407 | } |
| 1408 | } |
| 1409 | |
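| | // Parses "for (<variable declarations or effectful expression>; <optional condition>; <optional increment>) statement". Everything after the initialization is shared between the two initialization forms via parseRemainder.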
| 1410 | auto Parser::parseForLoop() -> Expected<AST::ForLoop, Error> |
| 1411 | { |
| 1412 | auto origin = consumeType(Lexer::Token::Type::For); |
| 1413 | if (!origin) |
| 1414 | return Unexpected<Error>(origin.error()); |
| 1415 | |
| 1416 | auto parseRemainder = [&](Variant<AST::VariableDeclarationsStatement, UniqueRef<AST::Expression>>&& initialization) -> Expected<AST::ForLoop, Error> { |
| 1417 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1418 | if (!semicolon) |
| 1419 | return Unexpected<Error>(semicolon.error()); |
| 1420 | |
| 1421 | auto condition = backtrackingScope<Optional<UniqueRef<AST::Expression>>>([&]() -> Optional<UniqueRef<AST::Expression>> { |
| 1422 | if (auto expression = parseExpression()) |
| 1423 | return { WTFMove(*expression) }; |
| 1424 | return WTF::nullopt; |
| 1425 | }); |
| 1426 | |
| 1427 | semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1428 | if (!semicolon) |
| 1429 | return Unexpected<Error>(semicolon.error()); |
| 1430 | |
| 1431 | auto increment = backtrackingScope<Optional<UniqueRef<AST::Expression>>>([&]() -> Optional<UniqueRef<AST::Expression>> { |
| 1432 | if (auto expression = parseExpression()) |
| 1433 | return { WTFMove(*expression) }; |
| 1434 | return WTF::nullopt; |
| 1435 | }); |
| 1436 | |
| 1437 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1438 | if (!rightParenthesis) |
| 1439 | return Unexpected<Error>(rightParenthesis.error()); |
| 1440 | |
| 1441 | auto body = parseStatement(); |
| 1442 | if (!body) |
| 1443 | return Unexpected<Error>(body.error()); |
| 1444 | |
| 1445 | return AST::ForLoop(WTFMove(*origin), WTFMove(initialization), WTFMove(condition), WTFMove(increment), WTFMove(*body)); |
| 1446 | }; |
| 1447 | |
| 1448 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1449 | if (!leftParenthesis) |
| 1450 | return Unexpected<Error>(leftParenthesis.error()); |
| 1451 | |
| 1452 | auto variableDeclarations = backtrackingScope<Expected<AST::VariableDeclarationsStatement, Error>>([&]() { |
| 1453 | return parseVariableDeclarations(); |
| 1454 | }); |
| 1455 | if (variableDeclarations) |
| 1456 | return parseRemainder(WTFMove(*variableDeclarations)); |
| 1457 | |
| 1458 | auto effectfulExpression = parseEffectfulExpression(); |
| 1459 | if (!effectfulExpression) |
| 1460 | return Unexpected<Error>(effectfulExpression.error()); |
| 1461 | |
| 1462 | return parseRemainder(WTFMove(*effectfulExpression)); |
| 1463 | } |
| 1464 | |
| 1465 | auto Parser::parseWhileLoop() -> Expected<AST::WhileLoop, Error> |
| 1466 | { |
| 1467 | auto origin = consumeType(Lexer::Token::Type::While); |
| 1468 | if (!origin) |
| 1469 | return Unexpected<Error>(origin.error()); |
| 1470 | |
| 1471 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1472 | if (!leftParenthesis) |
| 1473 | return Unexpected<Error>(leftParenthesis.error()); |
| 1474 | |
| 1475 | auto conditional = parseExpression(); |
| 1476 | if (!conditional) |
| 1477 | return Unexpected<Error>(conditional.error()); |
| 1478 | |
| 1479 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1480 | if (!rightParenthesis) |
| 1481 | return Unexpected<Error>(rightParenthesis.error()); |
| 1482 | |
| 1483 | auto body = parseStatement(); |
| 1484 | if (!body) |
| 1485 | return Unexpected<Error>(body.error()); |
| 1486 | |
| 1487 | return AST::WhileLoop(WTFMove(*origin), WTFMove(*conditional), WTFMove(*body)); |
| 1488 | } |
| 1489 | |
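| | // Parses "do statement while (expression)". The trailing semicolon is consumed by the caller (parseStatement).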
| 1490 | auto Parser::parseDoWhileLoop() -> Expected<AST::DoWhileLoop, Error> |
| 1491 | { |
| 1492 | auto origin = consumeType(Lexer::Token::Type::Do); |
| 1493 | if (!origin) |
| 1494 | return Unexpected<Error>(origin.error()); |
| 1495 | |
| 1496 | auto body = parseStatement(); |
| 1497 | if (!body) |
| 1498 | return Unexpected<Error>(body.error()); |
| 1499 | |
| 1500 | auto whileKeyword = consumeType(Lexer::Token::Type::While); |
| 1501 | if (!whileKeyword) |
| 1502 | return Unexpected<Error>(whileKeyword.error()); |
| 1503 | |
| 1504 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1505 | if (!leftParenthesis) |
| 1506 | return Unexpected<Error>(leftParenthesis.error()); |
| 1507 | |
| 1508 | auto conditional = parseExpression(); |
| 1509 | if (!conditional) |
| 1510 | return Unexpected<Error>(conditional.error()); |
| 1511 | |
| 1512 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1513 | if (!rightParenthesis) |
| 1514 | return Unexpected<Error>(rightParenthesis.error()); |
| 1515 | |
| 1516 | return AST::DoWhileLoop(WTFMove(*origin), WTFMove(*body), WTFMove(*conditional)); |
| 1517 | } |
| 1518 | |
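| | // Parses a single declarator for a type supplied by the caller: qualifiers, a name, an optional ": semantic", and an optional "= initializer".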
| 1519 | auto Parser::parseVariableDeclaration(UniqueRef<AST::UnnamedType>&& type) -> Expected<AST::VariableDeclaration, Error> |
| 1520 | { |
| 1521 | auto origin = peek(); |
| 1522 | if (!origin) |
| 1523 | return Unexpected<Error>(origin.error()); |
| 1524 | |
| 1525 | auto qualifiers = parseQualifiers(); |
| 1526 | |
| 1527 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 1528 | if (!name) |
| 1529 | return Unexpected<Error>(name.error()); |
| 1530 | |
| 1531 | if (tryType(Lexer::Token::Type::Colon)) { |
| 1532 | auto semantic = parseSemantic(); |
| 1533 | if (!semantic) |
| 1534 | return Unexpected<Error>(semantic.error()); |
| 1535 | |
| 1536 | if (tryType(Lexer::Token::Type::EqualsSign)) { |
| 1537 | auto initializer = parseExpression(); |
| 1538 | if (!initializer) |
| 1539 | return Unexpected<Error>(initializer.error()); |
| 1540 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), { WTFMove(type) }, name->stringView.toString(), WTFMove(*semantic), WTFMove(*initializer)); |
| 1541 | } |
| 1542 | |
| 1543 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), { WTFMove(type) }, name->stringView.toString(), WTFMove(*semantic), WTF::nullopt); |
| 1544 | } |
| 1545 | |
| 1546 | if (tryType(Lexer::Token::Type::EqualsSign)) { |
| 1547 | auto initializer = parseExpression(); |
| 1548 | if (!initializer) |
| 1549 | return Unexpected<Error>(initializer.error()); |
| 1550 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), { WTFMove(type) }, name->stringView.toString(), WTF::nullopt, WTFMove(*initializer)); |
| 1551 | } |
| 1552 | |
| 1553 | return AST::VariableDeclaration(WTFMove(*origin), WTFMove(qualifiers), { WTFMove(type) }, name->stringView.toString(), WTF::nullopt, WTF::nullopt); |
| 1554 | } |
| 1555 | |
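| | // Parses a comma-separated list of declarators sharing one type; the type is cloned for each declarator.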
| 1556 | auto Parser::parseVariableDeclarations() -> Expected<AST::VariableDeclarationsStatement, Error> |
| 1557 | { |
| 1558 | auto origin = peek(); |
| 1559 | if (!origin) |
| 1560 | return Unexpected<Error>(origin.error()); |
| 1561 | |
| 1562 | auto type = parseType(); |
| 1563 | if (!type) |
| 1564 | return Unexpected<Error>(type.error()); |
| 1565 | |
| 1566 | auto firstVariableDeclaration = parseVariableDeclaration((*type)->clone()); |
| 1567 | if (!firstVariableDeclaration) |
| 1568 | return Unexpected<Error>(firstVariableDeclaration.error()); |
| 1569 | |
| 1570 | Vector<AST::VariableDeclaration> result; |
| 1571 | result.append(WTFMove(*firstVariableDeclaration)); |
| 1572 | |
| 1573 | while (tryType(Lexer::Token::Type::Comma)) { |
| 1574 | auto variableDeclaration = parseVariableDeclaration((*type)->clone()); |
| 1575 | if (!variableDeclaration) |
| 1576 | return Unexpected<Error>(variableDeclaration.error()); |
| 1577 | result.append(WTFMove(*variableDeclaration)); |
| 1578 | } |
| 1579 | |
| 1580 | return AST::VariableDeclarationsStatement(WTFMove(*origin), WTFMove(result)); |
| 1581 | } |
| 1582 | |
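| | // Tries each statement form in turn with backtracking: block, if, switch, for, while, do-while, break, continue, fallthrough, trap, return, variable declarations, and finally an effectful expression statement.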
| 1583 | auto Parser::parseStatement() -> Expected<UniqueRef<AST::Statement>, Error> |
| 1584 | { |
| 1585 | { |
| 1586 | auto block = backtrackingScope<Expected<AST::Block, Error>>([&]() { |
| 1587 | return parseBlock(); |
| 1588 | }); |
| 1589 | if (block) |
| 1590 | return { makeUniqueRef<AST::Block>(WTFMove(*block)) }; |
| 1591 | } |
| 1592 | |
| 1593 | { |
| 1594 | auto ifStatement = backtrackingScope<Expected<AST::IfStatement, Error>>([&]() { |
| 1595 | return parseIfStatement(); |
| 1596 | }); |
| 1597 | if (ifStatement) |
| 1598 | return { makeUniqueRef<AST::IfStatement>(WTFMove(*ifStatement)) }; |
| 1599 | } |
| 1600 | |
| 1601 | { |
| 1602 | auto switchStatement = backtrackingScope<Expected<AST::SwitchStatement, Error>>([&]() { |
| 1603 | return parseSwitchStatement(); |
| 1604 | }); |
| 1605 | if (switchStatement) |
| 1606 | return { makeUniqueRef<AST::SwitchStatement>(WTFMove(*switchStatement)) }; |
| 1607 | } |
| 1608 | |
| 1609 | { |
| 1610 | auto forLoop = backtrackingScope<Expected<AST::ForLoop, Error>>([&]() { |
| 1611 | return parseForLoop(); |
| 1612 | }); |
| 1613 | if (forLoop) |
| 1614 | return { makeUniqueRef<AST::ForLoop>(WTFMove(*forLoop)) }; |
| 1615 | } |
| 1616 | |
| 1617 | { |
| 1618 | auto whileLoop = backtrackingScope<Expected<AST::WhileLoop, Error>>([&]() { |
| 1619 | return parseWhileLoop(); |
| 1620 | }); |
| 1621 | if (whileLoop) |
| 1622 | return { makeUniqueRef<AST::WhileLoop>(WTFMove(*whileLoop)) }; |
| 1623 | } |
| 1624 | |
| 1625 | { |
| 1626 | auto doWhileLoop = backtrackingScope<Expected<AST::DoWhileLoop, Error>>([&]() -> Expected<AST::DoWhileLoop, Error> { |
| 1627 | auto result = parseDoWhileLoop(); |
| 1628 | if (!result) |
| 1629 | return Unexpected<Error>(result.error()); |
| 1630 | |
| 1631 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1632 | if (!semicolon) |
| 1633 | return Unexpected<Error>(semicolon.error()); |
| 1634 | |
| 1635 | return result; |
| 1636 | }); |
| 1637 | if (doWhileLoop) |
| 1638 | return { makeUniqueRef<AST::DoWhileLoop>(WTFMove(*doWhileLoop)) }; |
| 1639 | } |
| 1640 | |
| 1641 | { |
| 1642 | auto breakObject = backtrackingScope<Expected<AST::Break, Error>>([&]() -> Expected<AST::Break, Error> { |
| 1643 | auto origin = consumeType(Lexer::Token::Type::Break); |
| 1644 | if (!origin) |
| 1645 | return Unexpected<Error>(origin.error()); |
| 1646 | |
| 1647 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1648 | if (!semicolon) |
| 1649 | return Unexpected<Error>(semicolon.error()); |
| 1650 | |
| 1651 | return AST::Break(WTFMove(*origin)); |
| 1652 | }); |
| 1653 | if (breakObject) |
| 1654 | return { makeUniqueRef<AST::Break>(WTFMove(*breakObject)) }; |
| 1655 | } |
| 1656 | |
| 1657 | { |
| 1658 | auto continueObject = backtrackingScope<Expected<AST::Continue, Error>>([&]() -> Expected<AST::Continue, Error> { |
| 1659 | auto origin = consumeType(Lexer::Token::Type::Continue); |
| 1660 | if (!origin) |
| 1661 | return Unexpected<Error>(origin.error()); |
| 1662 | |
| 1663 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1664 | if (!semicolon) |
| 1665 | return Unexpected<Error>(semicolon.error()); |
| 1666 | |
| 1667 | return AST::Continue(WTFMove(*origin)); |
| 1668 | }); |
| 1669 | if (continueObject) |
| 1670 | return { makeUniqueRef<AST::Continue>(WTFMove(*continueObject)) }; |
| 1671 | } |
| 1672 | |
| 1673 | { |
| 1674 | auto fallthroughObject = backtrackingScope<Expected<AST::Fallthrough, Error>>([&]() -> Expected<AST::Fallthrough, Error> { |
| 1675 | auto origin = consumeType(Lexer::Token::Type::Fallthrough); |
| 1676 | if (!origin) |
| 1677 | return Unexpected<Error>(origin.error()); |
| 1678 | |
| 1679 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1680 | if (!semicolon) |
| 1681 | return Unexpected<Error>(semicolon.error()); |
| 1682 | |
| 1683 | return AST::Fallthrough(WTFMove(*origin)); |
| 1684 | }); |
| 1685 | if (fallthroughObject) |
| 1686 | return { makeUniqueRef<AST::Fallthrough>(WTFMove(*fallthroughObject)) }; |
| 1687 | } |
| 1688 | |
| 1689 | { |
| 1690 | auto trapObject = backtrackingScope<Expected<AST::Trap, Error>>([&]() -> Expected<AST::Trap, Error> { |
| 1691 | auto origin = consumeType(Lexer::Token::Type::Trap); |
| 1692 | if (!origin) |
| 1693 | return Unexpected<Error>(origin.error()); |
| 1694 | |
| 1695 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1696 | if (!semicolon) |
| 1697 | return Unexpected<Error>(semicolon.error()); |
| 1698 | |
| 1699 | return AST::Trap(WTFMove(*origin)); |
| 1700 | }); |
| 1701 | if (trapObject) |
| 1702 | return { makeUniqueRef<AST::Trap>(WTFMove(*trapObject)) }; |
| 1703 | } |
| 1704 | |
| 1705 | { |
| 1706 | auto returnObject = backtrackingScope<Expected<AST::Return, Error>>([&]() -> Expected<AST::Return, Error> { |
| 1707 | auto origin = consumeType(Lexer::Token::Type::Return); |
| 1708 | if (!origin) |
| 1709 | return Unexpected<Error>(origin.error()); |
| 1710 | |
| 1711 | if (auto semicolon = tryType(Lexer::Token::Type::Semicolon)) |
| 1712 | return AST::Return(WTFMove(*origin), WTF::nullopt); |
| 1713 | |
| 1714 | auto expression = parseExpression(); |
| 1715 | if (!expression) |
| 1716 | return Unexpected<Error>(expression.error()); |
| 1717 | |
| 1718 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1719 | if (!semicolon) |
| 1720 | return Unexpected<Error>(semicolon.error()); |
| 1721 | |
| 1722 | return AST::Return(WTFMove(*origin), { WTFMove(*expression) }); |
| 1723 | }); |
| 1724 | if (returnObject) |
| 1725 | return { makeUniqueRef<AST::Return>(WTFMove(*returnObject)) }; |
| 1726 | } |
| 1727 | |
| 1728 | { |
| 1729 | auto variableDeclarations = backtrackingScope<Expected<AST::VariableDeclarationsStatement, Error>>([&]() -> Expected<AST::VariableDeclarationsStatement, Error> { |
| 1730 | auto result = parseVariableDeclarations(); |
| 1731 | if (!result) |
| 1732 | return Unexpected<Error>(result.error()); |
| 1733 | |
| 1734 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1735 | if (!semicolon) |
| 1736 | return Unexpected<Error>(semicolon.error()); |
| 1737 | |
| 1738 | return result; |
| 1739 | }); |
| 1740 | if (variableDeclarations) |
| 1741 | return { makeUniqueRef<AST::VariableDeclarationsStatement>(WTFMove(*variableDeclarations)) }; |
| 1742 | } |
| 1743 | |
| 1744 | auto effectfulExpression = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() -> Expected<UniqueRef<AST::Expression>, Error> { |
| 1745 | auto result = parseEffectfulExpression(); |
| 1746 | if (!result) |
| 1747 | return Unexpected<Error>(result.error()); |
| 1748 | |
| 1749 | auto semicolon = consumeType(Lexer::Token::Type::Semicolon); |
| 1750 | if (!semicolon) |
| 1751 | return Unexpected<Error>(semicolon.error()); |
| 1752 | |
| 1753 | return result; |
| 1754 | }); |
| 1755 | if (effectfulExpression) |
| 1756 | return { makeUniqueRef<AST::EffectfulExpressionStatement>(WTFMove(*effectfulExpression)) }; |
| 1757 | |
| 1758 | return Unexpected<Error>(effectfulExpression.error()); |
| 1759 | } |
| 1760 | |
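| | // Parses a comma-separated list of effectful assignments. An empty list is not an error; it is represented as a CommaExpression with no sub-expressions.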
| 1761 | auto Parser::parseEffectfulExpression() -> Expected<UniqueRef<AST::Expression>, Error> |
| 1762 | { |
| 1763 | auto origin = peek(); |
| 1764 | if (!origin) |
| 1765 | return Unexpected<Error>(origin.error()); |
| 1766 | |
| 1767 | Vector<UniqueRef<AST::Expression>> expressions; |
| 1768 | |
| 1769 | auto first = backtrackingScope<Optional<UniqueRef<AST::Expression>>>([&]() -> Optional<UniqueRef<AST::Expression>> { |
| 1770 | auto effectfulExpression = parseEffectfulAssignment(); |
| 1771 | if (!effectfulExpression) |
| 1772 | return WTF::nullopt; |
| 1773 | return { WTFMove(*effectfulExpression) }; |
| 1774 | }); |
| 1775 | if (!first) |
| 1776 | return { makeUniqueRef<AST::CommaExpression>(WTFMove(*origin), WTFMove(expressions)) }; |
| 1777 | |
| 1778 | expressions.append(WTFMove(*first)); |
| 1779 | |
| 1780 | while (tryType(Lexer::Token::Type::Comma)) { |
| 1781 | auto expression = parseEffectfulAssignment(); |
| 1782 | if (!expression) |
| 1783 | return Unexpected<Error>(expression.error()); |
| 1784 | expressions.append(WTFMove(*expression)); |
| 1785 | } |
| 1786 | |
| 1787 | if (expressions.size() == 1) |
| 1788 | return WTFMove(expressions[0]); |
| 1789 | return { makeUniqueRef<AST::CommaExpression>(WTFMove(*origin), WTFMove(expressions)) }; |
| 1790 | } |
| 1791 | |
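| | // An effectful assignment is either an assignment or an effectful prefix expression; the prefix path falls through to the suffix, call, and parenthesized forms.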
| 1792 | auto Parser::parseEffectfulAssignment() -> Expected<UniqueRef<AST::Expression>, Error> |
| 1793 | { |
| 1794 | auto assignment = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 1795 | return parseAssignment(); |
| 1796 | }); |
| 1797 | if (assignment) |
| 1798 | return assignment; |
| 1799 | |
| 1800 | assignment = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 1801 | return parseEffectfulPrefix(); |
| 1802 | }); |
| 1803 | if (assignment) |
| 1804 | return assignment; |
| 1805 | |
| 1806 | return Unexpected<Error>(assignment.error()); |
| 1807 | } |
| 1808 | |
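| | // "++e" and "--e" desugar to ReadModifyWriteExpressions that call operator++ / operator-- and yield the new value.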
| 1809 | auto Parser::parseEffectfulPrefix() -> Expected<UniqueRef<AST::Expression>, Error> |
| 1810 | { |
| 1811 | auto prefix = consumeTypes({ Lexer::Token::Type::PlusPlus, Lexer::Token::Type::MinusMinus }); |
| 1812 | if (!prefix) |
| 1813 | return parseEffectfulSuffix(); |
| 1814 | |
| 1815 | auto previous = parsePossiblePrefix(); |
| 1816 | if (!previous) |
| 1817 | return Unexpected<Error>(previous.error()); |
| 1818 | |
| 1819 | switch (prefix->type) { |
| 1820 | case Lexer::Token::Type::PlusPlus: { |
| 1821 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*prefix), WTFMove(*previous)); |
| 1822 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1823 | callArguments.append(result->oldVariableReference()); |
| 1824 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*prefix), "operator++"_str , WTFMove(callArguments))); |
| 1825 | result->setResultExpression(result->newVariableReference()); |
| 1826 | return { WTFMove(result) }; |
| 1827 | } |
| 1828 | default: { |
| 1829 | ASSERT(prefix->type == Lexer::Token::Type::MinusMinus); |
| 1830 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*prefix), WTFMove(*previous)); |
| 1831 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1832 | callArguments.append(result->oldVariableReference()); |
| 1833 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*prefix), "operator--"_str , WTFMove(callArguments))); |
| 1834 | result->setResultExpression(result->newVariableReference()); |
| 1835 | return { WTFMove(result) }; |
| 1836 | } |
| 1837 | } |
| 1838 | } |
| 1839 | |
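| | // "e++" and "e--" desugar like the prefix forms but yield the old value. Otherwise an effectful suffix is a call expression or a parenthesized expression.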
| 1840 | auto Parser::parseEffectfulSuffix() -> Expected<UniqueRef<AST::Expression>, Error> |
| 1841 | { |
| 1842 | auto effectfulSuffix = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() -> Expected<UniqueRef<AST::Expression>, Error> { |
| 1843 | auto previous = parsePossibleSuffix(); |
| 1844 | if (!previous) |
| 1845 | return Unexpected<Error>(previous.error()); |
| 1846 | |
| 1847 | auto suffix = consumeTypes({ Lexer::Token::Type::PlusPlus, Lexer::Token::Type::MinusMinus }); |
| 1848 | if (!suffix) |
| 1849 | return Unexpected<Error>(suffix.error()); |
| 1850 | |
| 1851 | switch (suffix->type) { |
| 1852 | case Lexer::Token::Type::PlusPlus: { |
| 1853 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*suffix), WTFMove(*previous)); |
| 1854 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1855 | callArguments.append(result->oldVariableReference()); |
| 1856 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*suffix), "operator++"_str , WTFMove(callArguments))); |
| 1857 | result->setResultExpression(result->oldVariableReference()); |
| 1858 | return { WTFMove(result) }; |
| 1859 | } |
| 1860 | default: { |
| 1861 | ASSERT(suffix->type == Lexer::Token::Type::MinusMinus); |
| 1862 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*suffix), WTFMove(*previous)); |
| 1863 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1864 | callArguments.append(result->oldVariableReference()); |
| 1865 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*suffix), "operator--"_str , WTFMove(callArguments))); |
| 1866 | result->setResultExpression(result->oldVariableReference()); |
| 1867 | return { WTFMove(result) }; |
| 1868 | } |
| 1869 | } |
| 1870 | }); |
| 1871 | if (effectfulSuffix) |
| 1872 | return effectfulSuffix; |
| 1873 | |
| 1874 | effectfulSuffix = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 1875 | return parseCallExpression(); |
| 1876 | }); |
| 1877 | if (effectfulSuffix) |
| 1878 | return effectfulSuffix; |
| 1879 | |
| 1880 | effectfulSuffix = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() -> Expected<UniqueRef<AST::Expression>, Error> { |
| 1881 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 1882 | if (!leftParenthesis) |
| 1883 | return Unexpected<Error>(leftParenthesis.error()); |
| 1884 | |
| 1885 | auto expression = parseExpression(); |
| 1886 | if (!expression) |
| 1887 | return Unexpected<Error>(expression.error()); |
| 1888 | |
| 1889 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 1890 | if (!rightParenthesis) |
| 1891 | return Unexpected<Error>(rightParenthesis.error()); |
| 1892 | |
| 1893 | return { WTFMove(*expression) }; |
| 1894 | }); |
| 1895 | if (effectfulSuffix) |
| 1896 | return effectfulSuffix; |
| 1897 | |
| 1898 | return Unexpected<Error>(effectfulSuffix.error()); |
| 1899 | } |
| 1900 | |
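| | // Parses a single ".field", "->field" (desugared to a dereference plus a dot expression), or "[index]" suffix. Returns the original expression with a false flag if no complete suffix can be parsed.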
| 1901 | auto Parser::parseLimitedSuffixOperator(UniqueRef<AST::Expression>&& previous) -> SuffixExpression |
| 1902 | { |
| 1903 | auto type = consumeTypes({ Lexer::Token::Type::FullStop, Lexer::Token::Type::Arrow, Lexer::Token::Type::LeftSquareBracket }); |
| 1904 | if (!type) |
| 1905 | return SuffixExpression(WTFMove(previous), false); |
| 1906 | |
| 1907 | switch (type->type) { |
| 1908 | case Lexer::Token::Type::FullStop: { |
| 1909 | auto identifier = consumeType(Lexer::Token::Type::Identifier); |
| 1910 | if (!identifier) |
| 1911 | return SuffixExpression(WTFMove(previous), false); |
| 1912 | return SuffixExpression(makeUniqueRef<AST::DotExpression>(WTFMove(*type), WTFMove(previous), identifier->stringView.toString()), true); |
| 1913 | } |
| 1914 | case Lexer::Token::Type::Arrow: { |
| 1915 | auto identifier = consumeType(Lexer::Token::Type::Identifier); |
| 1916 | if (!identifier) |
| 1917 | return SuffixExpression(WTFMove(previous), false); |
| 1918 | return SuffixExpression(makeUniqueRef<AST::DotExpression>(Lexer::Token(*type), makeUniqueRef<AST::DereferenceExpression>(WTFMove(*type), WTFMove(previous)), identifier->stringView.toString()), true); |
| 1919 | } |
| 1920 | default: { |
| 1921 | ASSERT(type->type == Lexer::Token::Type::LeftSquareBracket); |
| 1922 | auto expression = parseExpression(); |
| 1923 | if (!expression) |
| 1924 | return SuffixExpression(WTFMove(previous), false); |
| 1925 | if (!consumeType(Lexer::Token::Type::RightSquareBracket)) |
| 1926 | return SuffixExpression(WTFMove(previous), false); |
| 1927 | return SuffixExpression(makeUniqueRef<AST::IndexExpression>(WTFMove(*type), WTFMove(previous), WTFMove(*expression)), true); |
| 1928 | } |
| 1929 | } |
| 1930 | } |
| 1931 | |
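| | // Like parseLimitedSuffixOperator, but also accepts postfix "++" and "--", which desugar to ReadModifyWriteExpressions that yield the old value.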
| 1932 | auto Parser::parseSuffixOperator(UniqueRef<AST::Expression>&& previous) -> SuffixExpression |
| 1933 | { |
| 1934 | auto suffix = consumeTypes({ Lexer::Token::Type::FullStop, Lexer::Token::Type::Arrow, Lexer::Token::Type::LeftSquareBracket, Lexer::Token::Type::PlusPlus, Lexer::Token::Type::MinusMinus }); |
| 1935 | if (!suffix) |
| 1936 | return SuffixExpression(WTFMove(previous), false); |
| 1937 | |
| 1938 | switch (suffix->type) { |
| 1939 | case Lexer::Token::Type::FullStop: { |
| 1940 | auto identifier = consumeType(Lexer::Token::Type::Identifier); |
| 1941 | if (!identifier) |
| 1942 | return SuffixExpression(WTFMove(previous), false); |
| 1943 | return SuffixExpression(makeUniqueRef<AST::DotExpression>(WTFMove(*suffix), WTFMove(previous), identifier->stringView.toString()), true); |
| 1944 | } |
| 1945 | case Lexer::Token::Type::Arrow: { |
| 1946 | auto identifier = consumeType(Lexer::Token::Type::Identifier); |
| 1947 | if (!identifier) |
| 1948 | return SuffixExpression(WTFMove(previous), false); |
| 1949 | return SuffixExpression(makeUniqueRef<AST::DotExpression>(Lexer::Token(*suffix), makeUniqueRef<AST::DereferenceExpression>(WTFMove(*suffix), WTFMove(previous)), identifier->stringView.toString()), true); |
| 1950 | } |
| 1951 | case Lexer::Token::Type::LeftSquareBracket: { |
| 1952 | auto expression = parseExpression(); |
| 1953 | if (!expression) |
| 1954 | return SuffixExpression(WTFMove(previous), false); |
| 1955 | if (!consumeType(Lexer::Token::Type::RightSquareBracket)) |
| 1956 | return SuffixExpression(WTFMove(previous), false); |
| 1957 | return SuffixExpression(makeUniqueRef<AST::IndexExpression>(WTFMove(*suffix), WTFMove(previous), WTFMove(*expression)), true); |
| 1958 | } |
| 1959 | case Lexer::Token::Type::PlusPlus: { |
| 1960 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*suffix), WTFMove(previous)); |
| 1961 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1962 | callArguments.append(result->oldVariableReference()); |
| 1963 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*suffix), "operator++"_str , WTFMove(callArguments))); |
| 1964 | result->setResultExpression(result->oldVariableReference()); |
| 1965 | return SuffixExpression(WTFMove(result), true); |
| 1966 | } |
| 1967 | default: { |
| 1968 | ASSERT(suffix->type == Lexer::Token::Type::MinusMinus); |
| 1969 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*suffix), WTFMove(previous)); |
| 1970 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 1971 | callArguments.append(result->oldVariableReference()); |
| 1972 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*suffix), "operator--"_str , WTFMove(callArguments))); |
| 1973 | result->setResultExpression(result->oldVariableReference()); |
| 1974 | return SuffixExpression(WTFMove(result), true); |
| 1975 | } |
| 1976 | } |
| 1977 | } |
| 1978 | |
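| | // Parses a comma expression: one or more possible ternary conditionals separated by commas. A single expression is returned unwrapped.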
| 1979 | auto Parser::parseExpression() -> Expected<UniqueRef<AST::Expression>, Error> |
| 1980 | { |
| 1981 | auto origin = peek(); |
| 1982 | if (!origin) |
| 1983 | return Unexpected<Error>(origin.error()); |
| 1984 | |
| 1985 | auto first = parsePossibleTernaryConditional(); |
| 1986 | if (!first) |
| 1987 | return Unexpected<Error>(first.error()); |
| 1988 | |
| 1989 | Vector<UniqueRef<AST::Expression>> expressions; |
| 1990 | expressions.append(WTFMove(*first)); |
| 1991 | |
| 1992 | while (tryType(Lexer::Token::Type::Comma)) { |
| 1993 | auto expression = parsePossibleTernaryConditional(); |
| 1994 | if (!expression) |
| 1995 | return Unexpected<Error>(expression.error()); |
| 1996 | expressions.append(WTFMove(*expression)); |
| 1997 | } |
| 1998 | |
| 1999 | if (expressions.size() == 1) |
| 2000 | return WTFMove(expressions[0]); |
| 2001 | return { makeUniqueRef<AST::CommaExpression>(WTFMove(*origin), WTFMove(expressions)) }; |
| 2002 | } |
| 2003 | |
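| | // Parses "predicate ? expression : expression". Like parseIfStatement, the predicate is wrapped in an implicit bool(...) call.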
| 2004 | auto Parser::parseTernaryConditional() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2005 | { |
| 2006 | auto origin = peek(); |
| 2007 | if (!origin) |
| 2008 | return Unexpected<Error>(origin.error()); |
| 2009 | |
| 2010 | auto predicate = parsePossibleLogicalBinaryOperation(); |
| 2011 | if (!predicate) |
| 2012 | return Unexpected<Error>(predicate.error()); |
| 2013 | |
| 2014 | auto questionMark = consumeType(Lexer::Token::Type::QuestionMark); |
| 2015 | if (!questionMark) |
| 2016 | return Unexpected<Error>(questionMark.error()); |
| 2017 | |
| 2018 | auto bodyExpression = parseExpression(); |
| 2019 | if (!bodyExpression) |
| 2020 | return Unexpected<Error>(bodyExpression.error()); |
| 2021 | |
| 2022 | auto colon = consumeType(Lexer::Token::Type::Colon); |
| 2023 | if (!colon) |
| 2024 | return Unexpected<Error>(colon.error()); |
| 2025 | |
| 2026 | auto elseExpression = parsePossibleTernaryConditional(); |
| 2027 | if (!elseExpression) |
| 2028 | return Unexpected<Error>(elseExpression.error()); |
| 2029 | |
| 2030 | Vector<UniqueRef<AST::Expression>> castArguments; |
| 2031 | castArguments.append(WTFMove(*predicate)); |
| 2032 | auto boolCast = makeUniqueRef<AST::CallExpression>(Lexer::Token(*origin), "bool"_str , WTFMove(castArguments)); |
| 2033 | return { makeUniqueRef<AST::TernaryExpression>(WTFMove(*origin), WTFMove(boolCast), WTFMove(*bodyExpression), WTFMove(*elseExpression)) }; |
| 2034 | } |
| 2035 | |
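| | // Simple assignment produces an AssignmentExpression. Compound assignments (+=, -=, *=, /=, %=, ^=, &=, |=, >>=, <<=) desugar to a ReadModifyWriteExpression that calls the corresponding operator and yields the new value.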
| 2036 | auto Parser::parseAssignment() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2037 | { |
| 2038 | auto origin = peek(); |
| 2039 | if (!origin) |
| 2040 | return Unexpected<Error>(origin.error()); |
| 2041 | |
| 2042 | auto left = parsePossiblePrefix(); |
| 2043 | if (!left) |
| 2044 | return Unexpected<Error>(left.error()); |
| 2045 | |
| 2046 | auto assignmentOperator = consumeTypes({ |
| 2047 | Lexer::Token::Type::EqualsSign, |
| 2048 | Lexer::Token::Type::PlusEquals, |
| 2049 | Lexer::Token::Type::MinusEquals, |
| 2050 | Lexer::Token::Type::TimesEquals, |
| 2051 | Lexer::Token::Type::DivideEquals, |
| 2052 | Lexer::Token::Type::ModEquals, |
| 2053 | Lexer::Token::Type::XorEquals, |
| 2054 | Lexer::Token::Type::AndEquals, |
| 2055 | Lexer::Token::Type::OrEquals, |
| 2056 | Lexer::Token::Type::RightShiftEquals, |
| 2057 | Lexer::Token::Type::LeftShiftEquals |
| 2058 | }); |
| 2059 | if (!assignmentOperator) |
| 2060 | return Unexpected<Error>(assignmentOperator.error()); |
| 2061 | |
| 2062 | auto right = parsePossibleTernaryConditional(); |
| 2063 | if (!right) |
| 2064 | return Unexpected<Error>(right.error()); |
| 2065 | |
| 2066 | if (assignmentOperator->type == Lexer::Token::Type::EqualsSign) |
| 2067 | return { makeUniqueRef<AST::AssignmentExpression>(WTFMove(*origin), WTFMove(*left), WTFMove(*right))}; |
| 2068 | |
| 2069 | String name; |
| 2070 | switch (assignmentOperator->type) { |
| 2071 | case Lexer::Token::Type::PlusEquals: |
| 2072 | name = "operator+"_str ; |
| 2073 | break; |
| 2074 | case Lexer::Token::Type::MinusEquals: |
| 2075 | name = "operator-"_str ; |
| 2076 | break; |
| 2077 | case Lexer::Token::Type::TimesEquals: |
| 2078 | name = "operator*"_str ; |
| 2079 | break; |
| 2080 | case Lexer::Token::Type::DivideEquals: |
| 2081 | name = "operator/"_str ; |
| 2082 | break; |
| 2083 | case Lexer::Token::Type::ModEquals: |
| 2084 | name = "operator%"_str ; |
| 2085 | break; |
| 2086 | case Lexer::Token::Type::XorEquals: |
| 2087 | name = "operator^"_str ; |
| 2088 | break; |
| 2089 | case Lexer::Token::Type::AndEquals: |
| 2090 | name = "operator&"_str ; |
| 2091 | break; |
| 2092 | case Lexer::Token::Type::OrEquals: |
| 2093 | name = "operator|"_str ; |
| 2094 | break; |
| 2095 | case Lexer::Token::Type::RightShiftEquals: |
| 2096 | name = "operator>>"_str ; |
| 2097 | break; |
| 2098 | default: |
| 2099 | ASSERT(assignmentOperator->type == Lexer::Token::Type::LeftShiftEquals); |
| 2100 | name = "operator<<"_str ; |
| 2101 | break; |
| 2102 | } |
| 2103 | |
| 2104 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*origin), WTFMove(*left)); |
| 2105 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2106 | callArguments.append(result->oldVariableReference()); |
| 2107 | callArguments.append(WTFMove(*right)); |
| 2108 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(WTFMove(*origin), WTFMove(name), WTFMove(callArguments))); |
| 2109 | result->setResultExpression(result->newVariableReference()); |
| 2110 | return { WTFMove(result) }; |
| 2111 | } |
| 2112 | |
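| | // Tries a ternary conditional, then an assignment, then a logical binary operation, backtracking between attempts.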
| 2113 | auto Parser::parsePossibleTernaryConditional() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2114 | { |
| 2115 | auto ternaryExpression = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 2116 | return parseTernaryConditional(); |
| 2117 | }); |
| 2118 | if (ternaryExpression) |
| 2119 | return ternaryExpression; |
| 2120 | |
| 2121 | auto assignmentExpression = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 2122 | return parseAssignment(); |
| 2123 | }); |
| 2124 | if (assignmentExpression) |
| 2125 | return assignmentExpression; |
| 2126 | |
| 2127 | auto binaryOperation = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() { |
| 2128 | return parsePossibleLogicalBinaryOperation(); |
| 2129 | }); |
| 2130 | if (binaryOperation) |
| 2131 | return binaryOperation; |
| 2132 | |
| 2133 | return Unexpected<Error>(binaryOperation.error()); |
| 2134 | } |
| 2135 | |
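| | // Left-associative parsing of ||, &&, |, ^, and & at a single precedence level. The short-circuiting forms build LogicalExpressions; the bitwise forms desugar to operator calls.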
| 2136 | auto Parser::parsePossibleLogicalBinaryOperation() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2137 | { |
| 2138 | auto parsedPrevious = parsePossibleRelationalBinaryOperation(); |
| 2139 | if (!parsedPrevious) |
| 2140 | return Unexpected<Error>(parsedPrevious.error()); |
| 2141 | UniqueRef<AST::Expression> previous = WTFMove(*parsedPrevious); |
| 2142 | |
| 2143 | while (auto logicalBinaryOperation = tryTypes({ |
| 2144 | Lexer::Token::Type::OrOr, |
| 2145 | Lexer::Token::Type::AndAnd, |
| 2146 | Lexer::Token::Type::Or, |
| 2147 | Lexer::Token::Type::Xor, |
| 2148 | Lexer::Token::Type::And |
| 2149 | })) { |
| 2150 | auto next = parsePossibleRelationalBinaryOperation(); |
| 2151 | if (!next) |
| 2152 | return Unexpected<Error>(next.error()); |
| 2153 | |
| 2154 | switch (logicalBinaryOperation->type) { |
| 2155 | case Lexer::Token::Type::OrOr: |
| 2156 | previous = makeUniqueRef<AST::LogicalExpression>(WTFMove(*logicalBinaryOperation), AST::LogicalExpression::Type::Or, WTFMove(previous), WTFMove(*next)); |
| 2157 | break; |
| 2158 | case Lexer::Token::Type::AndAnd: |
| 2159 | previous = makeUniqueRef<AST::LogicalExpression>(WTFMove(*logicalBinaryOperation), AST::LogicalExpression::Type::And, WTFMove(previous), WTFMove(*next)); |
| 2160 | break; |
| 2161 | case Lexer::Token::Type::Or: { |
| 2162 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2163 | callArguments.append(WTFMove(previous)); |
| 2164 | callArguments.append(WTFMove(*next)); |
| 2165 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*logicalBinaryOperation), "operator|"_str , WTFMove(callArguments)); |
| 2166 | break; |
| 2167 | } |
| 2168 | case Lexer::Token::Type::Xor: { |
| 2169 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2170 | callArguments.append(WTFMove(previous)); |
| 2171 | callArguments.append(WTFMove(*next)); |
| 2172 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*logicalBinaryOperation), "operator^"_str , WTFMove(callArguments)); |
| 2173 | break; |
| 2174 | } |
| 2175 | default: { |
| 2176 | ASSERT(logicalBinaryOperation->type == Lexer::Token::Type::And); |
| 2177 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2178 | callArguments.append(WTFMove(previous)); |
| 2179 | callArguments.append(WTFMove(*next)); |
| 2180 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*logicalBinaryOperation), "operator&"_str , WTFMove(callArguments)); |
| 2181 | break; |
| 2182 | } |
| 2183 | } |
| 2184 | } |
| 2185 | |
| 2186 | return { WTFMove(previous) }; |
| 2187 | } |
| 2188 | |
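| | // Left-associative parsing of <, >, <=, >=, ==, and !=. "a != b" desugars to "!(a == b)".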
| 2189 | auto Parser::parsePossibleRelationalBinaryOperation() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2190 | { |
| 2191 | auto parsedPrevious = parsePossibleShift(); |
| 2192 | if (!parsedPrevious) |
| 2193 | return Unexpected<Error>(parsedPrevious.error()); |
| 2194 | UniqueRef<AST::Expression> previous = WTFMove(*parsedPrevious); |
| 2195 | |
| 2196 | while (auto relationalBinaryOperation = tryTypes({ |
| 2197 | Lexer::Token::Type::LessThanSign, |
| 2198 | Lexer::Token::Type::GreaterThanSign, |
| 2199 | Lexer::Token::Type::LessThanOrEqualTo, |
| 2200 | Lexer::Token::Type::GreaterThanOrEqualTo, |
| 2201 | Lexer::Token::Type::EqualComparison, |
| 2202 | Lexer::Token::Type::NotEqual |
| 2203 | })) { |
| 2204 | auto next = parsePossibleShift(); |
| 2205 | if (!next) |
| 2206 | return Unexpected<Error>(next.error()); |
| 2207 | |
| 2208 | switch (relationalBinaryOperation->type) { |
| 2209 | case Lexer::Token::Type::LessThanSign: { |
| 2210 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2211 | callArguments.append(WTFMove(previous)); |
| 2212 | callArguments.append(WTFMove(*next)); |
| 2213 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*relationalBinaryOperation), "operator<"_str , WTFMove(callArguments)); |
| 2214 | break; |
| 2215 | } |
| 2216 | case Lexer::Token::Type::GreaterThanSign: { |
| 2217 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2218 | callArguments.append(WTFMove(previous)); |
| 2219 | callArguments.append(WTFMove(*next)); |
| 2220 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*relationalBinaryOperation), "operator>"_str , WTFMove(callArguments)); |
| 2221 | break; |
| 2222 | } |
| 2223 | case Lexer::Token::Type::LessThanOrEqualTo: { |
| 2224 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2225 | callArguments.append(WTFMove(previous)); |
| 2226 | callArguments.append(WTFMove(*next)); |
| 2227 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*relationalBinaryOperation), "operator<="_str , WTFMove(callArguments)); |
| 2228 | break; |
| 2229 | } |
| 2230 | case Lexer::Token::Type::GreaterThanOrEqualTo: { |
| 2231 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2232 | callArguments.append(WTFMove(previous)); |
| 2233 | callArguments.append(WTFMove(*next)); |
| 2234 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*relationalBinaryOperation), "operator>="_str , WTFMove(callArguments)); |
| 2235 | break; |
| 2236 | } |
| 2237 | case Lexer::Token::Type::EqualComparison: { |
| 2238 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2239 | callArguments.append(WTFMove(previous)); |
| 2240 | callArguments.append(WTFMove(*next)); |
| 2241 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*relationalBinaryOperation), "operator=="_str , WTFMove(callArguments)); |
| 2242 | break; |
| 2243 | } |
| 2244 | default: { |
| 2245 | ASSERT(relationalBinaryOperation->type == Lexer::Token::Type::NotEqual); |
| 2246 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2247 | callArguments.append(WTFMove(previous)); |
| 2248 | callArguments.append(WTFMove(*next)); |
| 2249 | previous = makeUniqueRef<AST::CallExpression>(Lexer::Token(*relationalBinaryOperation), "operator=="_str , WTFMove(callArguments)); |
| 2250 | previous = makeUniqueRef<AST::LogicalNotExpression>(WTFMove(*relationalBinaryOperation), WTFMove(previous)); |
| 2251 | break; |
| 2252 | } |
| 2253 | } |
| 2254 | } |
| 2255 | |
| 2256 | return WTFMove(previous); |
| 2257 | } |
| 2258 | |
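| | // The remaining binary precedence levels (shift, additive, multiplicative) follow the same pattern: left-associative, with each operator desugared to a call to the matching operator function.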
| 2259 | auto Parser::parsePossibleShift() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2260 | { |
| 2261 | auto parsedPrevious = parsePossibleAdd(); |
| 2262 | if (!parsedPrevious) |
| 2263 | return Unexpected<Error>(parsedPrevious.error()); |
| 2264 | UniqueRef<AST::Expression> previous = WTFMove(*parsedPrevious); |
| 2265 | |
| 2266 | while (auto shift = tryTypes({ |
| 2267 | Lexer::Token::Type::LeftShift, |
| 2268 | Lexer::Token::Type::RightShift |
| 2269 | })) { |
| 2270 | auto next = parsePossibleAdd(); |
| 2271 | if (!next) |
| 2272 | return Unexpected<Error>(next.error()); |
| 2273 | |
| 2274 | switch (shift->type) { |
| 2275 | case Lexer::Token::Type::LeftShift: { |
| 2276 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2277 | callArguments.append(WTFMove(previous)); |
| 2278 | callArguments.append(WTFMove(*next)); |
| 2279 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*shift), "operator<<"_str , WTFMove(callArguments)); |
| 2280 | break; |
| 2281 | } |
| 2282 | default: { |
| 2283 | ASSERT(shift->type == Lexer::Token::Type::RightShift); |
| 2284 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2285 | callArguments.append(WTFMove(previous)); |
| 2286 | callArguments.append(WTFMove(*next)); |
| 2287 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*shift), "operator>>"_str , WTFMove(callArguments)); |
| 2288 | break; |
| 2289 | } |
| 2290 | } |
| 2291 | } |
| 2292 | |
| 2293 | return WTFMove(previous); |
| 2294 | } |
| 2295 | |
| 2296 | auto Parser::parsePossibleAdd() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2297 | { |
| 2298 | auto parsedPrevious = parsePossibleMultiply(); |
| 2299 | if (!parsedPrevious) |
| 2300 | return Unexpected<Error>(parsedPrevious.error()); |
| 2301 | UniqueRef<AST::Expression> previous = WTFMove(*parsedPrevious); |
| 2302 | |
| 2303 | while (auto add = tryTypes({ |
| 2304 | Lexer::Token::Type::Plus, |
| 2305 | Lexer::Token::Type::Minus |
| 2306 | })) { |
| 2307 | auto next = parsePossibleMultiply(); |
| 2308 | if (!next) |
| 2309 | return Unexpected<Error>(next.error()); |
| 2310 | |
| 2311 | switch (add->type) { |
| 2312 | case Lexer::Token::Type::Plus: { |
| 2313 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2314 | callArguments.append(WTFMove(previous)); |
| 2315 | callArguments.append(WTFMove(*next)); |
| 2316 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*add), "operator+"_str , WTFMove(callArguments)); |
| 2317 | break; |
| 2318 | } |
| 2319 | default: { |
| 2320 | ASSERT(add->type == Lexer::Token::Type::Minus); |
| 2321 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2322 | callArguments.append(WTFMove(previous)); |
| 2323 | callArguments.append(WTFMove(*next)); |
| 2324 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*add), "operator-"_str , WTFMove(callArguments)); |
| 2325 | break; |
| 2326 | } |
| 2327 | } |
| 2328 | } |
| 2329 | |
| 2330 | return WTFMove(previous); |
| 2331 | } |
| 2332 | |
| 2333 | auto Parser::parsePossibleMultiply() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2334 | { |
| 2335 | auto parsedPrevious = parsePossiblePrefix(); |
| 2336 | if (!parsedPrevious) |
| 2337 | return Unexpected<Error>(parsedPrevious.error()); |
| 2338 | UniqueRef<AST::Expression> previous = WTFMove(*parsedPrevious); |
| 2339 | |
| 2340 | while (auto multiply = tryTypes({ |
| 2341 | Lexer::Token::Type::Star, |
| 2342 | Lexer::Token::Type::Divide, |
| 2343 | Lexer::Token::Type::Mod |
| 2344 | })) { |
| 2345 | auto next = parsePossiblePrefix(); |
| 2346 | if (!next) |
| 2347 | return Unexpected<Error>(next.error()); |
| 2348 | |
| 2349 | switch (multiply->type) { |
| 2350 | case Lexer::Token::Type::Star: { |
| 2351 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2352 | callArguments.append(WTFMove(previous)); |
| 2353 | callArguments.append(WTFMove(*next)); |
| 2354 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*multiply), "operator*"_str , WTFMove(callArguments)); |
| 2355 | break; |
| 2356 | } |
| 2357 | case Lexer::Token::Type::Divide: { |
| 2358 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2359 | callArguments.append(WTFMove(previous)); |
| 2360 | callArguments.append(WTFMove(*next)); |
| 2361 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*multiply), "operator/"_str , WTFMove(callArguments)); |
| 2362 | break; |
| 2363 | } |
| 2364 | default: { |
| 2365 | ASSERT(multiply->type == Lexer::Token::Type::Mod); |
| 2366 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2367 | callArguments.append(WTFMove(previous)); |
| 2368 | callArguments.append(WTFMove(*next)); |
| 2369 | previous = makeUniqueRef<AST::CallExpression>(WTFMove(*multiply), "operator%"_str , WTFMove(callArguments)); |
| 2370 | break; |
| 2371 | } |
| 2372 | } |
| 2373 | } |
| 2374 | |
| 2375 | return WTFMove(previous); |
| 2376 | } |
| 2377 | |
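| | // Unary prefixes: "++" and "--" desugar to ReadModifyWriteExpressions yielding the new value; "+", "-", and "~" become operator calls; "!" wraps its operand in a bool(...) cast inside a LogicalNotExpression; "&" builds a MakePointerExpression, "@" a MakeArrayReferenceExpression, and "*" a DereferenceExpression.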
| 2378 | auto Parser::parsePossiblePrefix() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2379 | { |
| 2380 | if (auto prefix = tryTypes({ |
| 2381 | Lexer::Token::Type::PlusPlus, |
| 2382 | Lexer::Token::Type::MinusMinus, |
| 2383 | Lexer::Token::Type::Plus, |
| 2384 | Lexer::Token::Type::Minus, |
| 2385 | Lexer::Token::Type::Tilde, |
| 2386 | Lexer::Token::Type::ExclamationPoint, |
| 2387 | Lexer::Token::Type::And, |
| 2388 | Lexer::Token::Type::At, |
| 2389 | Lexer::Token::Type::Star |
| 2390 | })) { |
| 2391 | auto next = parsePossiblePrefix(); |
| 2392 | if (!next) |
| 2393 | return Unexpected<Error>(next.error()); |
| 2394 | |
| 2395 | switch (prefix->type) { |
| 2396 | case Lexer::Token::Type::PlusPlus: { |
| 2397 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*prefix), WTFMove(*next)); |
| 2398 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2399 | callArguments.append(result->oldVariableReference()); |
| 2400 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "operator++"_str , WTFMove(callArguments))); |
| 2401 | result->setResultExpression(result->newVariableReference()); |
| 2402 | return { WTFMove(result) }; |
| 2403 | } |
| 2404 | case Lexer::Token::Type::MinusMinus: { |
| 2405 | auto result = AST::ReadModifyWriteExpression::create(Lexer::Token(*prefix), WTFMove(*next)); |
| 2406 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2407 | callArguments.append(result->oldVariableReference()); |
| 2408 | result->setNewValueExpression(makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "operator--"_str , WTFMove(callArguments))); |
| 2409 | result->setResultExpression(result->newVariableReference()); |
| 2410 | return { WTFMove(result) }; |
| 2411 | } |
| 2412 | case Lexer::Token::Type::Plus: { |
| 2413 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2414 | callArguments.append(WTFMove(*next)); |
| 2415 | return { makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "operator+"_str , WTFMove(callArguments)) }; |
| 2416 | } |
| 2417 | case Lexer::Token::Type::Minus: { |
| 2418 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2419 | callArguments.append(WTFMove(*next)); |
| 2420 | return { makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "operator-"_str , WTFMove(callArguments)) }; |
| 2421 | } |
| 2422 | case Lexer::Token::Type::Tilde: { |
| 2423 | Vector<UniqueRef<AST::Expression>> callArguments; |
| 2424 | callArguments.append(WTFMove(*next)); |
| 2425 | return { makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "operator~"_str , WTFMove(callArguments)) }; |
| 2426 | } |
| 2427 | case Lexer::Token::Type::ExclamationPoint: { |
| 2428 | Vector<UniqueRef<AST::Expression>> castArguments; |
| 2429 | castArguments.append(WTFMove(*next)); |
| 2430 | auto boolCast = makeUniqueRef<AST::CallExpression>(Lexer::Token(*prefix), "bool"_str , WTFMove(castArguments)); |
| 2431 | return { makeUniqueRef<AST::LogicalNotExpression>(Lexer::Token(*prefix), WTFMove(boolCast)) }; |
| 2432 | } |
| 2433 | case Lexer::Token::Type::And: |
| 2434 | return { makeUniqueRef<AST::MakePointerExpression>(Lexer::Token(*prefix), WTFMove(*next)) }; |
| 2435 | case Lexer::Token::Type::At: |
| 2436 | return { makeUniqueRef<AST::MakeArrayReferenceExpression>(Lexer::Token(*prefix), WTFMove(*next)) }; |
| 2437 | default: |
| 2438 | ASSERT(prefix->type == Lexer::Token::Type::Star); |
| 2439 | return { makeUniqueRef<AST::DereferenceExpression>(Lexer::Token(*prefix), WTFMove(*next)) }; |
| 2440 | } |
| 2441 | } |
| 2442 | |
| 2443 | return parsePossibleSuffix(); |
| 2444 | } |
| 2445 | |
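| | // A suffix expression is either a call expression followed by the limited suffixes (".", "->", "[]"), or a term followed by the full suffix set, which also allows postfix "++" and "--".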
| 2446 | auto Parser::parsePossibleSuffix() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2447 | { |
| 2448 | auto suffix = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() -> Expected<UniqueRef<AST::Expression>, Error> { |
| 2449 | auto expression = parseCallExpression(); |
| 2450 | if (!expression) |
| 2451 | return Unexpected<Error>(expression.error()); |
| 2452 | |
| 2453 | while (true) { |
| 2454 | auto result = backtrackingScope<SuffixExpression>([&]() -> SuffixExpression { |
| 2455 | return parseLimitedSuffixOperator(WTFMove(*expression)); |
| 2456 | }); |
| 2457 | expression = WTFMove(result.result); |
| 2458 | if (!result) |
| 2459 | break; |
| 2460 | } |
| 2461 | return expression; |
| 2462 | }); |
| 2463 | if (suffix) |
| 2464 | return suffix; |
| 2465 | |
| 2466 | suffix = backtrackingScope<Expected<UniqueRef<AST::Expression>, Error>>([&]() -> Expected<UniqueRef<AST::Expression>, Error> { |
| 2467 | auto expression = parseTerm(); |
| 2468 | if (!expression) |
| 2469 | return Unexpected<Error>(expression.error()); |
| 2470 | |
| 2471 | while (true) { |
| 2472 | auto result = backtrackingScope<SuffixExpression>([&]() -> SuffixExpression { |
| 2473 | return parseSuffixOperator(WTFMove(*expression)); |
| 2474 | }); |
| 2475 | expression = WTFMove(result.result); |
| 2476 | if (!result) |
| 2477 | break; |
| 2478 | } |
| 2479 | return expression; |
| 2480 | }); |
| 2481 | if (suffix) |
| 2482 | return suffix; |
| 2483 | |
| 2484 | return Unexpected<Error>(suffix.error()); |
| 2485 | } |
| 2486 | |
| 2487 | auto Parser::parseCallExpression() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2488 | { |
| 2489 | auto name = consumeType(Lexer::Token::Type::Identifier); |
| 2490 | if (!name) |
| 2491 | return Unexpected<Error>(name.error()); |
| 2492 | auto callName = name->stringView.toString(); |
| 2493 | |
| 2494 | auto leftParenthesis = consumeType(Lexer::Token::Type::LeftParenthesis); |
| 2495 | if (!leftParenthesis) |
| 2496 | return Unexpected<Error>(leftParenthesis.error()); |
| 2497 | |
| 2498 | Vector<UniqueRef<AST::Expression>> arguments; |
| 2499 | if (tryType(Lexer::Token::Type::RightParenthesis)) |
| 2500 | return { makeUniqueRef<AST::CallExpression>(WTFMove(*name), WTFMove(callName), WTFMove(arguments)) }; |
| 2501 | |
| 2502 | auto firstArgument = parsePossibleTernaryConditional(); |
| 2503 | if (!firstArgument) |
| 2504 | return Unexpected<Error>(firstArgument.error()); |
| 2505 | arguments.append(WTFMove(*firstArgument)); |
| 2506 | while (tryType(Lexer::Token::Type::Comma)) { |
| 2507 | auto argument = parsePossibleTernaryConditional(); |
| 2508 | if (!argument) |
| 2509 | return Unexpected<Error>(argument.error()); |
| 2510 | arguments.append(WTFMove(*argument)); |
| 2511 | } |
| 2512 | |
| 2513 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 2514 | if (!rightParenthesis) |
| 2515 | return Unexpected<Error>(rightParenthesis.error()); |
| 2516 | |
| 2517 | return { makeUniqueRef<AST::CallExpression>(WTFMove(*name), WTFMove(callName), WTFMove(arguments)) }; |
| 2518 | } |
| 2519 | |
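| | // Parses a primary expression: an int/uint/float literal, null, true, false, a variable reference, or a parenthesized expression.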
| 2520 | auto Parser::parseTerm() -> Expected<UniqueRef<AST::Expression>, Error> |
| 2521 | { |
| 2522 | auto type = consumeTypes({ |
| 2523 | Lexer::Token::Type::IntLiteral, |
| 2524 | Lexer::Token::Type::UintLiteral, |
| 2525 | Lexer::Token::Type::FloatLiteral, |
| 2526 | Lexer::Token::Type::Null, |
| 2527 | Lexer::Token::Type::True, |
| 2528 | Lexer::Token::Type::False, |
| 2529 | Lexer::Token::Type::Identifier, |
| 2530 | Lexer::Token::Type::LeftParenthesis |
| 2531 | }); |
| 2532 | if (!type) |
| 2533 | return Unexpected<Error>(type.error()); |
| 2534 | |
| 2535 | switch (type->type) { |
| 2536 | case Lexer::Token::Type::IntLiteral: { |
| 2537 | auto value = intLiteralToInt(type->stringView); |
| 2538 | if (!value) |
| 2539 | return Unexpected<Error>(value.error()); |
| 2540 | return { makeUniqueRef<AST::IntegerLiteral>(WTFMove(*type), *value) }; |
| 2541 | } |
| 2542 | case Lexer::Token::Type::UintLiteral: { |
| 2543 | auto value = uintLiteralToUint(type->stringView); |
| 2544 | if (!value) |
| 2545 | return Unexpected<Error>(value.error()); |
| 2546 | return { makeUniqueRef<AST::UnsignedIntegerLiteral>(WTFMove(*type), *value) }; |
| 2547 | } |
| 2548 | case Lexer::Token::Type::FloatLiteral: { |
| 2549 | auto value = floatLiteralToFloat(type->stringView); |
| 2550 | if (!value) |
| 2551 | return Unexpected<Error>(value.error()); |
| 2552 | return { makeUniqueRef<AST::FloatLiteral>(WTFMove(*type), *value) }; |
| 2553 | } |
| 2554 | case Lexer::Token::Type::Null: |
| 2555 | return { makeUniqueRef<AST::NullLiteral>(WTFMove(*type)) }; |
| 2556 | case Lexer::Token::Type::True: |
| 2557 | return { makeUniqueRef<AST::BooleanLiteral>(WTFMove(*type), true) }; |
| 2558 | case Lexer::Token::Type::False: |
| 2559 | return { makeUniqueRef<AST::BooleanLiteral>(WTFMove(*type), false) }; |
| 2560 | case Lexer::Token::Type::Identifier: { |
| 2561 | auto name = type->stringView.toString(); |
| 2562 | return { makeUniqueRef<AST::VariableReference>(WTFMove(*type), WTFMove(name)) }; |
| 2563 | } |
| 2564 | default: { |
| 2565 | ASSERT(type->type == Lexer::Token::Type::LeftParenthesis); |
| 2566 | auto expression = parseExpression(); |
| 2567 | if (!expression) |
| 2568 | return Unexpected<Error>(expression.error()); |
| 2569 | |
| 2570 | auto rightParenthesis = consumeType(Lexer::Token::Type::RightParenthesis); |
| 2571 | if (!rightParenthesis) |
| 2572 | return Unexpected<Error>(rightParenthesis.error()); |
| 2573 | |
| 2574 | return { WTFMove(*expression) }; |
| 2575 | } |
| 2576 | } |
| 2577 | } |
| 2578 | |
| 2579 | } // namespace WHLSL |
| 2580 | |
| 2581 | } // namespace WebCore |
| 2582 | |
| 2583 | #endif // ENABLE(WEBGPU) |
| 2584 | |