Skip to content

Commit 252ae0f

Browse files
authored
Merge pull request #35 from Tsukuba-Programming-Lab/#34_UpdateBaseTraits
#34 Base周りの修正
2 parents (89b0bc5 + 0ca2528); commit 252ae0f

File tree

26 files changed

+72
-72
lines changed

26 files changed

+72
-72
lines changed

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

crates/core/src/lib.rs

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -6,17 +6,17 @@ use serde::{Serialize, Deserialize};
66
use serde_cbor::ser::to_vec_packed;
77
use serde_cbor::de::from_slice;
88

9-
use copager_lex::{LexSource, LexDriver};
10-
use copager_parse::{ParseSource, ParseDriver, ParseEvent};
9+
use copager_lex::{LexSource, BaseLexer};
10+
use copager_parse::{ParseSource, BaseParser, ParseEvent};
1111
use copager_ir::{IR, IRBuilder};
1212
use copager_utils::cache::Cacheable;
1313

14-
pub trait GrammarDesign {
14+
pub trait LanguageDesign {
1515
type Lex: LexSource;
1616
type Parse: ParseSource<<Self::Lex as LexSource>::Tag>;
1717
}
1818

19-
pub struct Grammar<Sl, Sp>
19+
pub struct Language<Sl, Sp>
2020
where
2121
Sl: LexSource,
2222
Sp: ParseSource<Sl::Tag>,
@@ -25,7 +25,7 @@ where
2525
_phantom_sp: PhantomData<Sp>,
2626
}
2727

28-
impl<Sl, Sp> GrammarDesign for Grammar<Sl, Sp>
28+
impl<Sl, Sp> LanguageDesign for Language<Sl, Sp>
2929
where
3030
Sl: LexSource,
3131
Sp: ParseSource<Sl::Tag>,
@@ -37,9 +37,9 @@ where
3737
#[derive(Debug, Serialize, Deserialize)]
3838
pub struct Processor<G, Dl, Dp>
3939
where
40-
G: GrammarDesign,
41-
Dl: LexDriver<G::Lex>,
42-
Dp: ParseDriver<G::Lex, G::Parse>,
40+
G: LanguageDesign,
41+
Dl: BaseLexer<G::Lex>,
42+
Dp: BaseParser<G::Lex, G::Parse>,
4343
{
4444
// Cache
4545
cache_lex: Option<Vec<u8>>,
@@ -62,9 +62,9 @@ where
6262

6363
impl<G, Dl, Dp> Processor<G, Dl, Dp>
6464
where
65-
G: GrammarDesign,
66-
Dl: LexDriver<G::Lex>,
67-
Dp: ParseDriver<G::Lex, G::Parse>,
65+
G: LanguageDesign,
66+
Dl: BaseLexer<G::Lex>,
67+
Dp: BaseParser<G::Lex, G::Parse>,
6868
{
6969
pub fn new() -> Self {
7070
Processor {
@@ -129,9 +129,9 @@ where
129129

130130
impl<G, Dl, Dp> Processor<G, Dl, Dp>
131131
where
132-
G: GrammarDesign,
133-
Dl: LexDriver<G::Lex> + Cacheable<G::Lex>,
134-
Dp: ParseDriver<G::Lex, G::Parse>,
132+
G: LanguageDesign,
133+
Dl: BaseLexer<G::Lex> + Cacheable<G::Lex>,
134+
Dp: BaseParser<G::Lex, G::Parse>,
135135
{
136136
pub fn prebuild_lexer(self) -> anyhow::Result<Self>
137137
where
@@ -159,9 +159,9 @@ where
159159

160160
impl<G, Dl, Dp> Processor<G, Dl, Dp>
161161
where
162-
G: GrammarDesign,
163-
Dl: LexDriver<G::Lex>,
164-
Dp: ParseDriver<G::Lex, G::Parse> + Cacheable<(G::Lex, G::Parse)>,
162+
G: LanguageDesign,
163+
Dl: BaseLexer<G::Lex>,
164+
Dp: BaseParser<G::Lex, G::Parse> + Cacheable<(G::Lex, G::Parse)>,
165165
{
166166
pub fn prebuild_parser(self) -> anyhow::Result<Self>
167167
where

crates/core/tests/prebuild.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ use serde::{Serialize, Deserialize};
22
use serde_cbor::ser::to_vec_packed;
33
use serde_cbor::de::from_slice;
44

5-
use copager_core::{Grammar, Processor};
5+
use copager_core::{Language, Processor};
66
use copager_cfg::token::TokenTag;
77
use copager_cfg::rule::{RuleTag, Rule, RuleElem};
88
use copager_lex::LexSource;
@@ -54,10 +54,10 @@ enum ExprRule {
5454
Num,
5555
}
5656

57-
type MyGrammar = Grammar<ExprToken, ExprRule>;
57+
type MyLanguage = Language<ExprToken, ExprRule>;
5858
type MyLexer = RegexLexer<ExprToken>;
5959
type MyParser = LR1<ExprToken, ExprRule>;
60-
type MyProcessor = Processor<MyGrammar, MyLexer, MyParser>;
60+
type MyProcessor = Processor<MyLanguage, MyLexer, MyParser>;
6161

6262
#[test]
6363
fn prebuild() -> anyhow::Result<()> {

crates/core/tests/simple.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use serde::{Serialize, Deserialize};
22

3-
use copager_core::{Grammar, Processor};
3+
use copager_core::{Language, Processor};
44
use copager_cfg::token::TokenTag;
55
use copager_cfg::rule::{RuleTag, Rule, RuleElem};
66
use copager_lex::LexSource;
@@ -52,10 +52,10 @@ enum ExprRule {
5252
Num,
5353
}
5454

55-
type MyGrammar = Grammar<ExprToken, ExprRule>;
55+
type MyLanguage = Language<ExprToken, ExprRule>;
5656
type MyLexer = RegexLexer<ExprToken>;
5757
type MyParser = LR1<ExprToken, ExprRule>;
58-
type MyProcessor = Processor<MyGrammar, MyLexer, MyParser>;
58+
type MyProcessor = Processor<MyLanguage, MyLexer, MyParser>;
5959

6060
#[test]
6161
fn simple_success() -> anyhow::Result<()> {

crates/core/tests/simple_multiple.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use serde::{Serialize, Deserialize};
22

3-
use copager_core::{Grammar, Processor};
3+
use copager_core::{Language, Processor};
44
use copager_cfg::token::TokenTag;
55
use copager_cfg::rule::{RuleTag, Rule, RuleElem};
66
use copager_lex::LexSource;
@@ -52,10 +52,10 @@ enum ExprRule {
5252
Num,
5353
}
5454

55-
type MyGrammar = Grammar<ExprToken, ExprRule>;
55+
type MyLanguage = Language<ExprToken, ExprRule>;
5656
type MyLexer = RegexLexer<ExprToken>;
5757
type MyParser = LR1<ExprToken, ExprRule>;
58-
type MyProcessor = Processor<MyGrammar, MyLexer, MyParser>;
58+
type MyProcessor = Processor<MyLanguage, MyLexer, MyParser>;
5959

6060
const OK_INPUTS: [&str; 7] = [
6161
"1 + 2",

crates/ir_sexp/tests/simple.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use copager_core::{Grammar, Processor};
1+
use copager_core::{Language, Processor};
22
use copager_cfg::token::TokenTag;
33
use copager_cfg::rule::{RuleTag, Rule, RuleElem};
44
use copager_lex::LexSource;
@@ -64,7 +64,7 @@ fn simple_eval() {
6464
}
6565

6666
fn parse<'input>(input: &'input str) -> anyhow::Result<SExp<'input, ExprToken, ExprRule>> {
67-
type TestLang = Grammar<ExprToken, ExprRule>;
67+
type TestLang = Language<ExprToken, ExprRule>;
6868
type TestLexer = RegexLexer<ExprToken>;
6969
type TestParser = LR1<ExprToken, ExprRule>;
7070
type TestProcessor = Processor<TestLang, TestLexer, TestParser>;

crates/lex/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ pub trait LexSource {
99
fn iter(&self) -> impl Iterator<Item = Self::Tag>;
1010
}
1111

12-
pub trait LexDriver<S>
12+
pub trait BaseLexer<S>
1313
where
1414
Self: Sized,
1515
S: LexSource,

crates/lex_regex/src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
use regex::{Regex, RegexSet};
44

55
use copager_cfg::token::{TokenTag, Token};
6-
use copager_lex::{LexSource, LexDriver};
6+
use copager_lex::{LexSource, BaseLexer};
77

88
#[derive(Debug)]
99
pub struct RegexLexer<S: LexSource> {
@@ -12,7 +12,7 @@ pub struct RegexLexer<S: LexSource> {
1212
regex_map: Vec<(Regex, S::Tag)>,
1313
}
1414

15-
impl<S: LexSource> LexDriver<S> for RegexLexer<S> {
15+
impl<S: LexSource> BaseLexer<S> for RegexLexer<S> {
1616
fn try_from(source: S) -> anyhow::Result<Self> {
1717
let regex_istr = Regex::new(source.ignore_token())?;
1818
let regex_set = source.iter()

crates/lex_regex/tests/simple.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
use copager_cfg::token::{TokenTag, Token};
2-
use copager_lex::{LexSource, LexDriver};
2+
use copager_lex::{LexSource, BaseLexer};
33
use copager_lex_regex::RegexLexer;
44

55
#[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq, LexSource)]
@@ -28,7 +28,7 @@ type MyLexer = RegexLexer<ExprToken>;
2828
#[test]
2929
fn simple_success() {
3030
let source = ExprToken::default();
31-
let lexer = <MyLexer as LexDriver<ExprToken>>::try_from(source).unwrap();
31+
let lexer = <MyLexer as BaseLexer<ExprToken>>::try_from(source).unwrap();
3232
let mut lexer = lexer.run("1 + 2 * 3");
3333
assert_eq_token(lexer.next(), "1");
3434
assert_eq_token(lexer.next(), "+");
@@ -42,7 +42,7 @@ fn simple_success() {
4242
#[should_panic]
4343
fn simple_failed() {
4444
let source = ExprToken::default();
45-
let lexer = <MyLexer as LexDriver<ExprToken>>::try_from(source).unwrap();
45+
let lexer = <MyLexer as BaseLexer<ExprToken>>::try_from(source).unwrap();
4646
let mut lexer = lexer.run("1 + 2 * stop 3");
4747
assert_eq_token(lexer.next(), "1");
4848
assert_eq_token(lexer.next(), "+");

crates/parse/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ pub trait ParseSource<T: TokenTag> {
2626
}
2727
}
2828

29-
pub trait ParseDriver<Sl, Sp>
29+
pub trait BaseParser<Sl, Sp>
3030
where
3131
Self: Sized,
3232
Sl: LexSource,

0 commit comments

Comments (0)