Diffstat (limited to 'infra/libkookie/nixpkgs/pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch')
-rw-r--r--  infra/libkookie/nixpkgs/pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch  223
1 file changed, 0 insertions, 223 deletions
diff --git a/infra/libkookie/nixpkgs/pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch b/infra/libkookie/nixpkgs/pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch
deleted file mode 100644
index e750b3c4e122..000000000000
--- a/infra/libkookie/nixpkgs/pkgs/development/tools/rust/rust-analyzer/no-loop-in-const-fn.patch
+++ /dev/null
@@ -1,223 +0,0 @@
-This patch reverts 4b989009e3839cfc6f021d1552a46561cee6cde2 (CONST LOOPS ARE HERE).
-
-diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
-index 5f885edfd..e72929f8c 100644
---- a/crates/parser/src/grammar/expressions.rs
-+++ b/crates/parser/src/grammar/expressions.rs
-@@ -316,7 +316,7 @@ fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMark
- }
-
- const LHS_FIRST: TokenSet =
-- atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
-+ atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]);
-
- fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
- let m;
-diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
-index 66a92a4e1..ba6dd2fbc 100644
---- a/crates/parser/src/grammar/expressions/atom.rs
-+++ b/crates/parser/src/grammar/expressions/atom.rs
-@@ -15,7 +15,7 @@ use super::*;
- // let _ = b"e";
- // let _ = br"f";
- // }
--pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
-+pub(crate) const LITERAL_FIRST: TokenSet = token_set![
- TRUE_KW,
- FALSE_KW,
- INT_NUMBER,
-@@ -25,8 +25,8 @@ pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
- STRING,
- RAW_STRING,
- BYTE_STRING,
-- RAW_BYTE_STRING,
--]);
-+ RAW_BYTE_STRING
-+];
-
- pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
- if !p.at_ts(LITERAL_FIRST) {
-@@ -39,7 +39,7 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
-
- // E.g. for after the break in `if break {}`, this should not match
- pub(super) const ATOM_EXPR_FIRST: TokenSet =
-- LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
-+ LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
- T!['('],
- T!['{'],
- T!['['],
-@@ -59,9 +59,9 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
- T![loop],
- T![for],
- LIFETIME,
-- ]));
-+ ]);
-
--const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[LET_KW, R_DOLLAR]);
-+const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
-
- pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
- if let Some(m) = literal(p) {
-diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
-index 22810e6fb..8fd8f3b80 100644
---- a/crates/parser/src/grammar/items.rs
-+++ b/crates/parser/src/grammar/items.rs
-@@ -26,7 +26,7 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
- }
- }
-
--pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
-+pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
- FN_KW,
- STRUCT_KW,
- ENUM_KW,
-@@ -41,7 +41,7 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
- USE_KW,
- MACRO_KW,
- T![;],
--]);
-+];
-
- pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) {
- let m = p.start();
-diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs
-index 5d297e2d6..52562afa4 100644
---- a/crates/parser/src/grammar/paths.rs
-+++ b/crates/parser/src/grammar/paths.rs
-@@ -3,7 +3,7 @@
- use super::*;
-
- pub(super) const PATH_FIRST: TokenSet =
-- TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![:], T![<]]);
-+ token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]];
-
- pub(super) fn is_path_start(p: &Parser) -> bool {
- is_use_path_start(p) || p.at(T![<])
-diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
-index 796f206e1..07b1d6dd5 100644
---- a/crates/parser/src/grammar/patterns.rs
-+++ b/crates/parser/src/grammar/patterns.rs
-@@ -2,18 +2,9 @@
-
- use super::*;
-
--pub(super) const PATTERN_FIRST: TokenSet =
-- expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
-- T![box],
-- T![ref],
-- T![mut],
-- T!['('],
-- T!['['],
-- T![&],
-- T![_],
-- T![-],
-- T![.],
-- ]));
-+pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
-+ .union(paths::PATH_FIRST)
-+ .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]);
-
- pub(crate) fn pattern(p: &mut Parser) {
- pattern_r(p, PAT_RECOVERY_SET);
-@@ -83,7 +74,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
- }
-
- const PAT_RECOVERY_SET: TokenSet =
-- TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
-+ token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
-
- fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
- let m = match p.nth(0) {
-diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
-index 1ea130ac5..9d00eb9b9 100644
---- a/crates/parser/src/grammar/types.rs
-+++ b/crates/parser/src/grammar/types.rs
-@@ -2,7 +2,7 @@
-
- use super::*;
-
--pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
-+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
- T!['('],
- T!['['],
- T![<],
-@@ -16,16 +16,16 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
- T![for],
- T![impl],
- T![dyn],
--]));
-+]);
-
--const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
-+const TYPE_RECOVERY_SET: TokenSet = token_set![
- T![')'],
- T![,],
- L_DOLLAR,
- // test_err struct_field_recover
- // struct S { f pub g: () }
- T![pub],
--]);
-+];
-
- pub(crate) fn type_(p: &mut Parser) {
- type_with_bounds_cond(p, true);
-diff --git a/crates/parser/src/token_set.rs b/crates/parser/src/token_set.rs
-index a68f0144e..994017acf 100644
---- a/crates/parser/src/token_set.rs
-+++ b/crates/parser/src/token_set.rs
-@@ -9,21 +9,15 @@ pub(crate) struct TokenSet(u128);
- impl TokenSet {
- pub(crate) const EMPTY: TokenSet = TokenSet(0);
-
-- pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
-- let mut res = 0u128;
-- let mut i = 0;
-- while i < kinds.len() {
-- res |= mask(kinds[i]);
-- i += 1
-- }
-- TokenSet(res)
-+ pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
-+ TokenSet(mask(kind))
- }
-
- pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
- TokenSet(self.0 | other.0)
- }
-
-- pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
-+ pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
- self.0 & mask(kind) != 0
- }
- }
-@@ -32,10 +26,16 @@ const fn mask(kind: SyntaxKind) -> u128 {
- 1u128 << (kind as usize)
- }
-
-+#[macro_export]
-+macro_rules! token_set {
-+ ($($t:expr),*) => { TokenSet::EMPTY$(.union(TokenSet::singleton($t)))* };
-+ ($($t:expr),* ,) => { token_set!($($t),*) };
-+}
-+
- #[test]
- fn token_set_works_for_tokens() {
- use crate::SyntaxKind::*;
-- let ts = TokenSet::new(&[EOF, SHEBANG]);
-+ let ts = token_set![EOF, SHEBANG];
- assert!(ts.contains(EOF));
- assert!(ts.contains(SHEBANG));
- assert!(!ts.contains(PLUS));
-diff --git a/xtask/src/install.rs b/xtask/src/install.rs
-index d829790d7..b25a6e301 100644
---- a/xtask/src/install.rs
-+++ b/xtask/src/install.rs
-@@ -7,7 +7,7 @@ use anyhow::{bail, format_err, Context, Result};
- use crate::not_bash::{pushd, run};
-
- // Latest stable, feel free to send a PR if this lags behind.
--const REQUIRED_RUST_VERSION: u32 = 46;
-+const REQUIRED_RUST_VERSION: u32 = 43;
-
- pub struct InstallCmd {
- pub client: Option<ClientOpt>,
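
For context, here is a minimal, self-contained Rust sketch of the change the deleted patch was reverting, simplified from the token_set.rs hunk above. SyntaxKind is stubbed with three variants here (rust-analyzer's real enum is generated and much larger), and the names VIA_NEW/VIA_MACRO are purely illustrative. Upstream commit 4b989009 ("CONST LOOPS ARE HERE") replaced the macro-based token_set! construction with a const fn TokenSet::new that loops over a slice, which needs Rust 1.46 (where loops in const fn were stabilized); the patch restored the loop-free singleton/union style so the crate kept building with Rust 1.43, and reverted the REQUIRED_RUST_VERSION bump from 43 to 46 accordingly. Dropping the patch from nixpkgs presumably means the packaged toolchain is now new enough for the upstream code.

// Minimal standalone sketch (not the rust-analyzer sources themselves) of the
// two TokenSet constructions the deleted patch switches between.
#[derive(Clone, Copy)]
enum SyntaxKind {
    Eof = 0,
    Shebang = 1,
    Plus = 2,
}

// A TokenSet is a bitset over SyntaxKind discriminants.
#[derive(Clone, Copy)]
struct TokenSet(u128);

const fn mask(kind: SyntaxKind) -> u128 {
    1u128 << (kind as usize)
}

impl TokenSet {
    const EMPTY: TokenSet = TokenSet(0);

    // Upstream style (commit 4b989009): a const fn that loops over a slice.
    // `while` in a const fn requires Rust 1.46 or newer.
    const fn new(kinds: &[SyntaxKind]) -> TokenSet {
        let mut res = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            res |= mask(kinds[i]);
            i += 1;
        }
        TokenSet(res)
    }

    // Reverted style: build sets from loop-free const fns, so the crate still
    // compiles with Rust 1.43.
    const fn singleton(kind: SyntaxKind) -> TokenSet {
        TokenSet(mask(kind))
    }

    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }

    fn contains(&self, kind: SyntaxKind) -> bool {
        self.0 & mask(kind) != 0
    }
}

// The macro the patch reinstates: folds singletons together with union().
macro_rules! token_set {
    ($($t:expr),*) => { TokenSet::EMPTY$(.union(TokenSet::singleton($t)))* };
    ($($t:expr),* ,) => { token_set!($($t),*) };
}

fn main() {
    // Both constructions yield the same bitset.
    const VIA_NEW: TokenSet = TokenSet::new(&[SyntaxKind::Eof, SyntaxKind::Shebang]);
    const VIA_MACRO: TokenSet = token_set![SyntaxKind::Eof, SyntaxKind::Shebang];
    assert!(VIA_NEW.contains(SyntaxKind::Eof) && VIA_MACRO.contains(SyntaxKind::Shebang));
    assert!(!VIA_NEW.contains(SyntaxKind::Plus) && !VIA_MACRO.contains(SyntaxKind::Plus));
}

Both forms produce identical sets at compile time; the only practical difference is the minimum compiler version they demand, which is why the patch existed only while the packaged Rust lagged behind 1.46.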