Added new connection patterns to metamodel. Updated editors. Bug correction in MWE2 generation scripts of Xtext editors.
Arnaud Dieumegard
2019-01-10 16:19:07 +01:00
parent 79820f4433
commit 3bf1aadfd6
26 changed files with 11762 additions and 10298 deletions

View File

@@ -727,10 +727,66 @@ ruleLinkType returns [Enumerator current=null]
)
|
(
enumLiteral_4='-{Shift}->'
enumLiteral_4='-{ShiftC}->'
{
$current = grammarAccess.getLinkTypeAccess().getSHIFTEnumLiteralDeclaration_4().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_4, grammarAccess.getLinkTypeAccess().getSHIFTEnumLiteralDeclaration_4());
$current = grammarAccess.getLinkTypeAccess().getSHIFTCEnumLiteralDeclaration_4().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_4, grammarAccess.getLinkTypeAccess().getSHIFTCEnumLiteralDeclaration_4());
}
)
|
(
enumLiteral_5='-{ShiftP}->'
{
$current = grammarAccess.getLinkTypeAccess().getSHIFTPEnumLiteralDeclaration_5().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_5, grammarAccess.getLinkTypeAccess().getSHIFTPEnumLiteralDeclaration_5());
}
)
|
(
enumLiteral_6='-{FlattenC}->'
{
$current = grammarAccess.getLinkTypeAccess().getFLATTENCEnumLiteralDeclaration_6().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_6, grammarAccess.getLinkTypeAccess().getFLATTENCEnumLiteralDeclaration_6());
}
)
|
(
enumLiteral_7='-{ForkC}->'
{
$current = grammarAccess.getLinkTypeAccess().getFORKCEnumLiteralDeclaration_7().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_7, grammarAccess.getLinkTypeAccess().getFORKCEnumLiteralDeclaration_7());
}
)
|
(
enumLiteral_8='-{ForkP}->'
{
$current = grammarAccess.getLinkTypeAccess().getFORKPEnumLiteralDeclaration_8().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_8, grammarAccess.getLinkTypeAccess().getFORKPEnumLiteralDeclaration_8());
}
)
|
(
enumLiteral_9='-{JoinC}->'
{
$current = grammarAccess.getLinkTypeAccess().getJOINCEnumLiteralDeclaration_9().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_9, grammarAccess.getLinkTypeAccess().getJOINCEnumLiteralDeclaration_9());
}
)
|
(
enumLiteral_10='-{JoinP}->'
{
$current = grammarAccess.getLinkTypeAccess().getJOINPEnumLiteralDeclaration_10().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_10, grammarAccess.getLinkTypeAccess().getJOINPEnumLiteralDeclaration_10());
}
)
|
(
enumLiteral_11='-{Last}->'
{
$current = grammarAccess.getLinkTypeAccess().getLASTEnumLiteralDeclaration_11().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_11, grammarAccess.getLinkTypeAccess().getLASTEnumLiteralDeclaration_11());
}
)
)

View File

@@ -2,9 +2,16 @@
')'=14
','=13
'-{First}->'=27
'-{FlattenC}->'=31
'-{ForkC}->'=32
'-{ForkP}->'=33
'-{Identity}->'=26
'-{JoinC}->'=34
'-{JoinP}->'=35
'-{Last}->'=36
'-{Rotate}->'=28
'-{Shift}->'=29
'-{ShiftC}->'=29
'-{ShiftP}->'=30
'-{Transpose}->'=25
'.'=20
'['=18
@@ -43,3 +50,10 @@ T__26=26
T__27=27
T__28=28
T__29=29
T__30=30
T__31=31
T__32=32
T__33=33
T__34=34
T__35=35
T__36=36

View File

@@ -20,10 +20,17 @@ public class InternalPatternLexer extends Lexer {
public static final int T__17=17;
public static final int T__18=18;
public static final int T__11=11;
public static final int T__33=33;
public static final int T__12=12;
public static final int T__34=34;
public static final int T__13=13;
public static final int T__35=35;
public static final int T__14=14;
public static final int T__36=36;
public static final int EOF=-1;
public static final int T__30=30;
public static final int T__31=31;
public static final int T__32=32;
public static final int RULE_ID=4;
public static final int RULE_WS=9;
public static final int RULE_ANY_OTHER=10;
@@ -428,10 +435,10 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = T__29;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:29:7: ( '-{Shift}->' )
// InternalPattern.g:29:9: '-{Shift}->'
// InternalPattern.g:29:7: ( '-{ShiftC}->' )
// InternalPattern.g:29:9: '-{ShiftC}->'
{
match("-{Shift}->");
match("-{ShiftC}->");
}
@@ -444,15 +451,162 @@ public class InternalPatternLexer extends Lexer {
}
// $ANTLR end "T__29"
// $ANTLR start "T__30"
public final void mT__30() throws RecognitionException {
try {
int _type = T__30;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:30:7: ( '-{ShiftP}->' )
// InternalPattern.g:30:9: '-{ShiftP}->'
{
match("-{ShiftP}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__30"
// $ANTLR start "T__31"
public final void mT__31() throws RecognitionException {
try {
int _type = T__31;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:31:7: ( '-{FlattenC}->' )
// InternalPattern.g:31:9: '-{FlattenC}->'
{
match("-{FlattenC}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__31"
// $ANTLR start "T__32"
public final void mT__32() throws RecognitionException {
try {
int _type = T__32;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:32:7: ( '-{ForkC}->' )
// InternalPattern.g:32:9: '-{ForkC}->'
{
match("-{ForkC}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__32"
// $ANTLR start "T__33"
public final void mT__33() throws RecognitionException {
try {
int _type = T__33;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:33:7: ( '-{ForkP}->' )
// InternalPattern.g:33:9: '-{ForkP}->'
{
match("-{ForkP}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__33"
// $ANTLR start "T__34"
public final void mT__34() throws RecognitionException {
try {
int _type = T__34;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:34:7: ( '-{JoinC}->' )
// InternalPattern.g:34:9: '-{JoinC}->'
{
match("-{JoinC}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__34"
// $ANTLR start "T__35"
public final void mT__35() throws RecognitionException {
try {
int _type = T__35;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:35:7: ( '-{JoinP}->' )
// InternalPattern.g:35:9: '-{JoinP}->'
{
match("-{JoinP}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__35"
// $ANTLR start "T__36"
public final void mT__36() throws RecognitionException {
try {
int _type = T__36;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:36:7: ( '-{Last}->' )
// InternalPattern.g:36:9: '-{Last}->'
{
match("-{Last}->");
}
state.type = _type;
state.channel = _channel;
}
finally {
}
}
// $ANTLR end "T__36"
// $ANTLR start "RULE_ID"
public final void mRULE_ID() throws RecognitionException {
try {
int _type = RULE_ID;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:739:9: ( ( '^' )? ( 'a' .. 'z' | 'A' .. 'Z' | '_' ) ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )* )
// InternalPattern.g:739:11: ( '^' )? ( 'a' .. 'z' | 'A' .. 'Z' | '_' ) ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*
// InternalPattern.g:795:9: ( ( '^' )? ( 'a' .. 'z' | 'A' .. 'Z' | '_' ) ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )* )
// InternalPattern.g:795:11: ( '^' )? ( 'a' .. 'z' | 'A' .. 'Z' | '_' ) ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*
{
// InternalPattern.g:739:11: ( '^' )?
// InternalPattern.g:795:11: ( '^' )?
int alt1=2;
int LA1_0 = input.LA(1);
@@ -461,7 +615,7 @@ public class InternalPatternLexer extends Lexer {
}
switch (alt1) {
case 1 :
// InternalPattern.g:739:11: '^'
// InternalPattern.g:795:11: '^'
{
match('^');
@@ -479,7 +633,7 @@ public class InternalPatternLexer extends Lexer {
recover(mse);
throw mse;}
// InternalPattern.g:739:40: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*
// InternalPattern.g:795:40: ( 'a' .. 'z' | 'A' .. 'Z' | '_' | '0' .. '9' )*
loop2:
do {
int alt2=2;
@@ -528,10 +682,10 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_INT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:741:10: ( ( '0' .. '9' )+ )
// InternalPattern.g:741:12: ( '0' .. '9' )+
// InternalPattern.g:797:10: ( ( '0' .. '9' )+ )
// InternalPattern.g:797:12: ( '0' .. '9' )+
{
// InternalPattern.g:741:12: ( '0' .. '9' )+
// InternalPattern.g:797:12: ( '0' .. '9' )+
int cnt3=0;
loop3:
do {
@@ -545,7 +699,7 @@ public class InternalPatternLexer extends Lexer {
switch (alt3) {
case 1 :
// InternalPattern.g:741:13: '0' .. '9'
// InternalPattern.g:797:13: '0' .. '9'
{
matchRange('0','9');
@@ -577,10 +731,10 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_STRING;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:743:13: ( ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' ) )
// InternalPattern.g:743:15: ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' )
// InternalPattern.g:799:13: ( ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' ) )
// InternalPattern.g:799:15: ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' )
{
// InternalPattern.g:743:15: ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' )
// InternalPattern.g:799:15: ( '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"' | '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\'' )
int alt6=2;
int LA6_0 = input.LA(1);
@@ -598,10 +752,10 @@ public class InternalPatternLexer extends Lexer {
}
switch (alt6) {
case 1 :
// InternalPattern.g:743:16: '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"'
// InternalPattern.g:799:16: '\"' ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )* '\"'
{
match('\"');
// InternalPattern.g:743:20: ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )*
// InternalPattern.g:799:20: ( '\\\\' . | ~ ( ( '\\\\' | '\"' ) ) )*
loop4:
do {
int alt4=3;
@@ -617,7 +771,7 @@ public class InternalPatternLexer extends Lexer {
switch (alt4) {
case 1 :
// InternalPattern.g:743:21: '\\\\' .
// InternalPattern.g:799:21: '\\\\' .
{
match('\\');
matchAny();
@@ -625,7 +779,7 @@ public class InternalPatternLexer extends Lexer {
}
break;
case 2 :
// InternalPattern.g:743:28: ~ ( ( '\\\\' | '\"' ) )
// InternalPattern.g:799:28: ~ ( ( '\\\\' | '\"' ) )
{
if ( (input.LA(1)>='\u0000' && input.LA(1)<='!')||(input.LA(1)>='#' && input.LA(1)<='[')||(input.LA(1)>=']' && input.LA(1)<='\uFFFF') ) {
input.consume();
@@ -650,10 +804,10 @@ public class InternalPatternLexer extends Lexer {
}
break;
case 2 :
// InternalPattern.g:743:48: '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\''
// InternalPattern.g:799:48: '\\'' ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )* '\\''
{
match('\'');
// InternalPattern.g:743:53: ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )*
// InternalPattern.g:799:53: ( '\\\\' . | ~ ( ( '\\\\' | '\\'' ) ) )*
loop5:
do {
int alt5=3;
@@ -669,7 +823,7 @@ public class InternalPatternLexer extends Lexer {
switch (alt5) {
case 1 :
// InternalPattern.g:743:54: '\\\\' .
// InternalPattern.g:799:54: '\\\\' .
{
match('\\');
matchAny();
@@ -677,7 +831,7 @@ public class InternalPatternLexer extends Lexer {
}
break;
case 2 :
// InternalPattern.g:743:61: ~ ( ( '\\\\' | '\\'' ) )
// InternalPattern.g:799:61: ~ ( ( '\\\\' | '\\'' ) )
{
if ( (input.LA(1)>='\u0000' && input.LA(1)<='&')||(input.LA(1)>='(' && input.LA(1)<='[')||(input.LA(1)>=']' && input.LA(1)<='\uFFFF') ) {
input.consume();
@@ -720,12 +874,12 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_ML_COMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:745:17: ( '/*' ( options {greedy=false; } : . )* '*/' )
// InternalPattern.g:745:19: '/*' ( options {greedy=false; } : . )* '*/'
// InternalPattern.g:801:17: ( '/*' ( options {greedy=false; } : . )* '*/' )
// InternalPattern.g:801:19: '/*' ( options {greedy=false; } : . )* '*/'
{
match("/*");
// InternalPattern.g:745:24: ( options {greedy=false; } : . )*
// InternalPattern.g:801:24: ( options {greedy=false; } : . )*
loop7:
do {
int alt7=2;
@@ -750,7 +904,7 @@ public class InternalPatternLexer extends Lexer {
switch (alt7) {
case 1 :
// InternalPattern.g:745:52: .
// InternalPattern.g:801:52: .
{
matchAny();
@@ -780,12 +934,12 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_SL_COMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:747:17: ( '//' (~ ( ( '\\n' | '\\r' ) ) )* ( ( '\\r' )? '\\n' )? )
// InternalPattern.g:747:19: '//' (~ ( ( '\\n' | '\\r' ) ) )* ( ( '\\r' )? '\\n' )?
// InternalPattern.g:803:17: ( '//' (~ ( ( '\\n' | '\\r' ) ) )* ( ( '\\r' )? '\\n' )? )
// InternalPattern.g:803:19: '//' (~ ( ( '\\n' | '\\r' ) ) )* ( ( '\\r' )? '\\n' )?
{
match("//");
// InternalPattern.g:747:24: (~ ( ( '\\n' | '\\r' ) ) )*
// InternalPattern.g:803:24: (~ ( ( '\\n' | '\\r' ) ) )*
loop8:
do {
int alt8=2;
@@ -798,7 +952,7 @@ public class InternalPatternLexer extends Lexer {
switch (alt8) {
case 1 :
// InternalPattern.g:747:24: ~ ( ( '\\n' | '\\r' ) )
// InternalPattern.g:803:24: ~ ( ( '\\n' | '\\r' ) )
{
if ( (input.LA(1)>='\u0000' && input.LA(1)<='\t')||(input.LA(1)>='\u000B' && input.LA(1)<='\f')||(input.LA(1)>='\u000E' && input.LA(1)<='\uFFFF') ) {
input.consume();
@@ -818,7 +972,7 @@ public class InternalPatternLexer extends Lexer {
}
} while (true);
// InternalPattern.g:747:40: ( ( '\\r' )? '\\n' )?
// InternalPattern.g:803:40: ( ( '\\r' )? '\\n' )?
int alt10=2;
int LA10_0 = input.LA(1);
@@ -827,9 +981,9 @@ public class InternalPatternLexer extends Lexer {
}
switch (alt10) {
case 1 :
// InternalPattern.g:747:41: ( '\\r' )? '\\n'
// InternalPattern.g:803:41: ( '\\r' )? '\\n'
{
// InternalPattern.g:747:41: ( '\\r' )?
// InternalPattern.g:803:41: ( '\\r' )?
int alt9=2;
int LA9_0 = input.LA(1);
@@ -838,7 +992,7 @@ public class InternalPatternLexer extends Lexer {
}
switch (alt9) {
case 1 :
// InternalPattern.g:747:41: '\\r'
// InternalPattern.g:803:41: '\\r'
{
match('\r');
@@ -870,10 +1024,10 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_WS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:749:9: ( ( ' ' | '\\t' | '\\r' | '\\n' )+ )
// InternalPattern.g:749:11: ( ' ' | '\\t' | '\\r' | '\\n' )+
// InternalPattern.g:805:9: ( ( ' ' | '\\t' | '\\r' | '\\n' )+ )
// InternalPattern.g:805:11: ( ' ' | '\\t' | '\\r' | '\\n' )+
{
// InternalPattern.g:749:11: ( ' ' | '\\t' | '\\r' | '\\n' )+
// InternalPattern.g:805:11: ( ' ' | '\\t' | '\\r' | '\\n' )+
int cnt11=0;
loop11:
do {
@@ -927,8 +1081,8 @@ public class InternalPatternLexer extends Lexer {
try {
int _type = RULE_ANY_OTHER;
int _channel = DEFAULT_TOKEN_CHANNEL;
// InternalPattern.g:751:16: ( . )
// InternalPattern.g:751:18: .
// InternalPattern.g:807:16: ( . )
// InternalPattern.g:807:18: .
{
matchAny();
@@ -943,8 +1097,8 @@ public class InternalPatternLexer extends Lexer {
// $ANTLR end "RULE_ANY_OTHER"
public void mTokens() throws RecognitionException {
// InternalPattern.g:1:8: ( T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | RULE_ID | RULE_INT | RULE_STRING | RULE_ML_COMMENT | RULE_SL_COMMENT | RULE_WS | RULE_ANY_OTHER )
int alt12=26;
// InternalPattern.g:1:8: ( T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | T__33 | T__34 | T__35 | T__36 | RULE_ID | RULE_INT | RULE_STRING | RULE_ML_COMMENT | RULE_SL_COMMENT | RULE_WS | RULE_ANY_OTHER )
int alt12=33;
alt12 = dfa12.predict(input);
switch (alt12) {
case 1 :
@@ -1081,49 +1235,98 @@ public class InternalPatternLexer extends Lexer {
}
break;
case 20 :
// InternalPattern.g:1:124: RULE_ID
// InternalPattern.g:1:124: T__30
{
mT__30();
}
break;
case 21 :
// InternalPattern.g:1:130: T__31
{
mT__31();
}
break;
case 22 :
// InternalPattern.g:1:136: T__32
{
mT__32();
}
break;
case 23 :
// InternalPattern.g:1:142: T__33
{
mT__33();
}
break;
case 24 :
// InternalPattern.g:1:148: T__34
{
mT__34();
}
break;
case 25 :
// InternalPattern.g:1:154: T__35
{
mT__35();
}
break;
case 26 :
// InternalPattern.g:1:160: T__36
{
mT__36();
}
break;
case 27 :
// InternalPattern.g:1:166: RULE_ID
{
mRULE_ID();
}
break;
case 21 :
// InternalPattern.g:1:132: RULE_INT
case 28 :
// InternalPattern.g:1:174: RULE_INT
{
mRULE_INT();
}
break;
case 22 :
// InternalPattern.g:1:141: RULE_STRING
case 29 :
// InternalPattern.g:1:183: RULE_STRING
{
mRULE_STRING();
}
break;
case 23 :
// InternalPattern.g:1:153: RULE_ML_COMMENT
case 30 :
// InternalPattern.g:1:195: RULE_ML_COMMENT
{
mRULE_ML_COMMENT();
}
break;
case 24 :
// InternalPattern.g:1:169: RULE_SL_COMMENT
case 31 :
// InternalPattern.g:1:211: RULE_SL_COMMENT
{
mRULE_SL_COMMENT();
}
break;
case 25 :
// InternalPattern.g:1:185: RULE_WS
case 32 :
// InternalPattern.g:1:227: RULE_WS
{
mRULE_WS();
}
break;
case 26 :
// InternalPattern.g:1:193: RULE_ANY_OTHER
case 33 :
// InternalPattern.g:1:235: RULE_ANY_OTHER
{
mRULE_ANY_OTHER();
@@ -1137,17 +1340,17 @@ public class InternalPatternLexer extends Lexer {
protected DFA12 dfa12 = new DFA12(this);
static final String DFA12_eotS =
"\1\uffff\1\30\5\uffff\1\30\3\uffff\3\30\2\26\2\uffff\3\26\2\uffff\1\30\6\uffff\1\30\3\uffff\1\30\1\57\1\30\6\uffff\4\30\1\uffff\1\72\5\uffff\4\30\1\uffff\3\30\1\102\2\30\1\105\1\uffff\1\106\1\30\2\uffff\1\30\1\111\1\uffff";
"\1\uffff\1\30\5\uffff\1\30\3\uffff\3\30\2\26\2\uffff\3\26\2\uffff\1\30\6\uffff\1\30\3\uffff\1\30\1\57\1\30\6\uffff\4\30\1\uffff\1\74\7\uffff\4\30\6\uffff\3\30\1\114\3\uffff\2\30\1\122\4\uffff\1\130\1\30\7\uffff\1\30\2\uffff\1\135\1\uffff";
static final String DFA12_eofS =
"\112\uffff";
"\136\uffff";
static final String DFA12_minS =
"\1\0\1\141\5\uffff\1\157\3\uffff\2\156\1\165\1\173\1\101\2\uffff\2\0\1\52\2\uffff\1\164\6\uffff\1\155\3\uffff\1\163\1\60\1\164\1\106\5\uffff\1\164\2\160\1\165\1\uffff\1\60\5\uffff\1\145\1\157\1\145\1\164\1\uffff\1\162\1\156\1\143\1\60\1\156\1\145\1\60\1\uffff\1\60\1\156\2\uffff\1\164\1\60\1\uffff";
"\1\0\1\141\5\uffff\1\157\3\uffff\2\156\1\165\1\173\1\101\2\uffff\2\0\1\52\2\uffff\1\164\6\uffff\1\155\3\uffff\1\163\1\60\1\164\1\106\5\uffff\1\164\2\160\1\165\1\uffff\1\60\2\uffff\1\151\1\uffff\1\150\1\157\1\uffff\1\145\1\157\1\145\1\164\3\uffff\1\162\2\151\1\162\1\156\1\143\1\60\1\153\1\146\2\156\1\145\1\60\1\uffff\1\103\1\164\1\103\1\60\1\156\3\uffff\1\103\3\uffff\1\164\2\uffff\1\60\1\uffff";
static final String DFA12_maxS =
"\1\uffff\1\141\5\uffff\1\157\3\uffff\2\156\1\165\1\173\1\172\2\uffff\2\uffff\1\57\2\uffff\1\164\6\uffff\1\155\3\uffff\1\163\1\172\1\164\1\124\5\uffff\1\164\2\160\1\165\1\uffff\1\172\5\uffff\1\145\1\157\1\145\1\164\1\uffff\1\162\1\156\1\143\1\172\1\156\1\145\1\172\1\uffff\1\172\1\156\2\uffff\1\164\1\172\1\uffff";
"\1\uffff\1\141\5\uffff\1\157\3\uffff\2\156\1\165\1\173\1\172\2\uffff\2\uffff\1\57\2\uffff\1\164\6\uffff\1\155\3\uffff\1\163\1\172\1\164\1\124\5\uffff\1\164\2\160\1\165\1\uffff\1\172\2\uffff\1\157\1\uffff\1\150\1\157\1\uffff\1\145\1\157\1\145\1\164\3\uffff\1\162\2\151\1\162\1\156\1\143\1\172\1\153\1\146\2\156\1\145\1\172\1\uffff\1\120\1\164\1\120\1\172\1\156\3\uffff\1\120\3\uffff\1\164\2\uffff\1\172\1\uffff";
static final String DFA12_acceptS =
"\2\uffff\1\2\1\3\1\4\1\5\1\6\1\uffff\1\10\1\11\1\12\5\uffff\1\24\1\25\3\uffff\1\31\1\32\1\uffff\1\24\1\2\1\3\1\4\1\5\1\6\1\uffff\1\10\1\11\1\12\4\uffff\1\25\1\26\1\27\1\30\1\31\4\uffff\1\14\1\uffff\1\17\1\20\1\21\1\22\1\23\4\uffff\1\15\7\uffff\1\16\2\uffff\1\13\1\1\2\uffff\1\7";
"\2\uffff\1\2\1\3\1\4\1\5\1\6\1\uffff\1\10\1\11\1\12\5\uffff\1\33\1\34\3\uffff\1\40\1\41\1\uffff\1\33\1\2\1\3\1\4\1\5\1\6\1\uffff\1\10\1\11\1\12\4\uffff\1\34\1\35\1\36\1\37\1\40\4\uffff\1\14\1\uffff\1\17\1\20\1\uffff\1\22\2\uffff\1\32\4\uffff\1\15\1\21\1\25\15\uffff\1\16\5\uffff\1\13\1\26\1\27\1\uffff\1\30\1\31\1\1\1\uffff\1\23\1\24\1\uffff\1\7";
static final String DFA12_specialS =
"\1\1\21\uffff\1\2\1\0\66\uffff}>";
"\1\1\21\uffff\1\2\1\0\112\uffff}>";
static final String[] DFA12_transitionS = {
"\11\26\2\25\2\26\1\25\22\26\1\25\1\26\1\22\4\26\1\23\1\2\1\4\2\26\1\3\1\16\1\12\1\24\12\21\7\26\32\20\1\10\1\26\1\11\1\17\1\20\1\26\2\20\1\7\5\20\1\14\5\20\1\15\1\1\4\20\1\13\5\20\1\5\1\26\1\6\uff82\26",
"\1\27",
@@ -1186,41 +1389,61 @@ public class InternalPatternLexer extends Lexer {
"\1\55",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\16\30\1\56\13\30",
"\1\60",
"\1\63\2\uffff\1\62\10\uffff\1\64\1\65\1\61",
"\1\63\2\uffff\1\62\1\66\1\uffff\1\67\5\uffff\1\64\1\65\1\61",
"",
"",
"",
"",
"",
"\1\66",
"\1\67",
"\1\70",
"\1\71",
"\1\72",
"\1\73",
"",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"",
"",
"\1\75\2\uffff\1\76\2\uffff\1\77",
"",
"",
"",
"\1\73",
"\1\74",
"\1\75",
"\1\76",
"",
"\1\77",
"\1\100",
"\1\101",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"",
"\1\102",
"\1\103",
"\1\104",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"\1\105",
"",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"",
"",
"\1\106",
"\1\107",
"",
"",
"\1\110",
"\1\111",
"\1\112",
"\1\113",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"\1\115",
"\1\116",
"\1\117",
"\1\120",
"\1\121",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"",
"\1\123\14\uffff\1\124",
"\1\125",
"\1\126\14\uffff\1\127",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
"\1\131",
"",
"",
"",
"\1\132\14\uffff\1\133",
"",
"",
"",
"\1\134",
"",
"",
"\12\30\7\uffff\32\30\4\uffff\1\30\1\uffff\32\30",
""
};
@@ -1255,7 +1478,7 @@ public class InternalPatternLexer extends Lexer {
this.transition = DFA12_transition;
}
public String getDescription() {
return "1:1: Tokens : ( T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | RULE_ID | RULE_INT | RULE_STRING | RULE_ML_COMMENT | RULE_SL_COMMENT | RULE_WS | RULE_ANY_OTHER );";
return "1:1: Tokens : ( T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | T__33 | T__34 | T__35 | T__36 | RULE_ID | RULE_INT | RULE_STRING | RULE_ML_COMMENT | RULE_SL_COMMENT | RULE_WS | RULE_ANY_OTHER );";
}
public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {
IntStream input = _input;
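As an illustration only (not part of this commit), the fragment below sketches how the regenerated lexer could be exercised on one of the new keywords. It assumes the standard ANTLR 3 CharStream constructor that these generated lexers expose; the class name NewKeywordLexerCheck is hypothetical and would sit in the same package as InternalPatternLexer.

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.Token;

public class NewKeywordLexerCheck {
    public static void main(String[] args) {
        // Feed a single new keyword to the regenerated lexer.
        InternalPatternLexer lexer =
            new InternalPatternLexer(new ANTLRStringStream("-{ForkC}->"));
        Token token = lexer.nextToken();
        // Per the token definitions above, '-{ForkC}->' lexes as token type 32 (T__32).
        System.out.println(token.getType() + " : " + token.getText());
    }
}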

View File

@@ -22,7 +22,7 @@ import java.util.ArrayList;
@SuppressWarnings("all")
public class InternalPatternParser extends AbstractInternalAntlrParser {
public static final String[] tokenNames = new String[] {
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "RULE_ID", "RULE_INT", "RULE_STRING", "RULE_ML_COMMENT", "RULE_SL_COMMENT", "RULE_WS", "RULE_ANY_OTHER", "'pattern'", "'('", "','", "')'", "'{'", "'}'", "'component'", "'['", "']'", "'.'", "'unspec'", "'in'", "'out'", "'inout'", "'-{Transpose}->'", "'-{Identity}->'", "'-{First}->'", "'-{Rotate}->'", "'-{Shift}->'"
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "RULE_ID", "RULE_INT", "RULE_STRING", "RULE_ML_COMMENT", "RULE_SL_COMMENT", "RULE_WS", "RULE_ANY_OTHER", "'pattern'", "'('", "','", "')'", "'{'", "'}'", "'component'", "'['", "']'", "'.'", "'unspec'", "'in'", "'out'", "'inout'", "'-{Transpose}->'", "'-{Identity}->'", "'-{First}->'", "'-{Rotate}->'", "'-{ShiftC}->'", "'-{ShiftP}->'", "'-{FlattenC}->'", "'-{ForkC}->'", "'-{ForkP}->'", "'-{JoinC}->'", "'-{JoinP}->'", "'-{Last}->'"
};
public static final int RULE_STRING=6;
public static final int RULE_SL_COMMENT=8;
@@ -32,10 +32,17 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
public static final int T__17=17;
public static final int T__18=18;
public static final int T__11=11;
public static final int T__33=33;
public static final int T__12=12;
public static final int T__34=34;
public static final int T__13=13;
public static final int T__35=35;
public static final int T__14=14;
public static final int T__36=36;
public static final int EOF=-1;
public static final int T__30=30;
public static final int T__31=31;
public static final int T__32=32;
public static final int RULE_ID=4;
public static final int RULE_WS=9;
public static final int RULE_ANY_OTHER=10;
@@ -1666,7 +1673,7 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
// $ANTLR start "ruleLinkType"
// InternalPattern.g:689:1: ruleLinkType returns [Enumerator current=null] : ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{Shift}->' ) ) ;
// InternalPattern.g:689:1: ruleLinkType returns [Enumerator current=null] : ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{ShiftC}->' ) | (enumLiteral_5= '-{ShiftP}->' ) | (enumLiteral_6= '-{FlattenC}->' ) | (enumLiteral_7= '-{ForkC}->' ) | (enumLiteral_8= '-{ForkP}->' ) | (enumLiteral_9= '-{JoinC}->' ) | (enumLiteral_10= '-{JoinP}->' ) | (enumLiteral_11= '-{Last}->' ) ) ;
public final Enumerator ruleLinkType() throws RecognitionException {
Enumerator current = null;
@@ -1675,16 +1682,23 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
Token enumLiteral_2=null;
Token enumLiteral_3=null;
Token enumLiteral_4=null;
Token enumLiteral_5=null;
Token enumLiteral_6=null;
Token enumLiteral_7=null;
Token enumLiteral_8=null;
Token enumLiteral_9=null;
Token enumLiteral_10=null;
Token enumLiteral_11=null;
enterRule();
try {
// InternalPattern.g:695:2: ( ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{Shift}->' ) ) )
// InternalPattern.g:696:2: ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{Shift}->' ) )
// InternalPattern.g:695:2: ( ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{ShiftC}->' ) | (enumLiteral_5= '-{ShiftP}->' ) | (enumLiteral_6= '-{FlattenC}->' ) | (enumLiteral_7= '-{ForkC}->' ) | (enumLiteral_8= '-{ForkP}->' ) | (enumLiteral_9= '-{JoinC}->' ) | (enumLiteral_10= '-{JoinP}->' ) | (enumLiteral_11= '-{Last}->' ) ) )
// InternalPattern.g:696:2: ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{ShiftC}->' ) | (enumLiteral_5= '-{ShiftP}->' ) | (enumLiteral_6= '-{FlattenC}->' ) | (enumLiteral_7= '-{ForkC}->' ) | (enumLiteral_8= '-{ForkP}->' ) | (enumLiteral_9= '-{JoinC}->' ) | (enumLiteral_10= '-{JoinP}->' ) | (enumLiteral_11= '-{Last}->' ) )
{
// InternalPattern.g:696:2: ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{Shift}->' ) )
int alt15=5;
// InternalPattern.g:696:2: ( (enumLiteral_0= '-{Transpose}->' ) | (enumLiteral_1= '-{Identity}->' ) | (enumLiteral_2= '-{First}->' ) | (enumLiteral_3= '-{Rotate}->' ) | (enumLiteral_4= '-{ShiftC}->' ) | (enumLiteral_5= '-{ShiftP}->' ) | (enumLiteral_6= '-{FlattenC}->' ) | (enumLiteral_7= '-{ForkC}->' ) | (enumLiteral_8= '-{ForkP}->' ) | (enumLiteral_9= '-{JoinC}->' ) | (enumLiteral_10= '-{JoinP}->' ) | (enumLiteral_11= '-{Last}->' ) )
int alt15=12;
switch ( input.LA(1) ) {
case 25:
{
@@ -1711,6 +1725,41 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
alt15=5;
}
break;
case 30:
{
alt15=6;
}
break;
case 31:
{
alt15=7;
}
break;
case 32:
{
alt15=8;
}
break;
case 33:
{
alt15=9;
}
break;
case 34:
{
alt15=10;
}
break;
case 35:
{
alt15=11;
}
break;
case 36:
{
alt15=12;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 15, 0, input);
@@ -1788,15 +1837,134 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
}
break;
case 5 :
// InternalPattern.g:729:3: (enumLiteral_4= '-{Shift}->' )
// InternalPattern.g:729:3: (enumLiteral_4= '-{ShiftC}->' )
{
// InternalPattern.g:729:3: (enumLiteral_4= '-{Shift}->' )
// InternalPattern.g:730:4: enumLiteral_4= '-{Shift}->'
// InternalPattern.g:729:3: (enumLiteral_4= '-{ShiftC}->' )
// InternalPattern.g:730:4: enumLiteral_4= '-{ShiftC}->'
{
enumLiteral_4=(Token)match(input,29,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getSHIFTEnumLiteralDeclaration_4().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_4, grammarAccess.getLinkTypeAccess().getSHIFTEnumLiteralDeclaration_4());
current = grammarAccess.getLinkTypeAccess().getSHIFTCEnumLiteralDeclaration_4().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_4, grammarAccess.getLinkTypeAccess().getSHIFTCEnumLiteralDeclaration_4());
}
}
break;
case 6 :
// InternalPattern.g:737:3: (enumLiteral_5= '-{ShiftP}->' )
{
// InternalPattern.g:737:3: (enumLiteral_5= '-{ShiftP}->' )
// InternalPattern.g:738:4: enumLiteral_5= '-{ShiftP}->'
{
enumLiteral_5=(Token)match(input,30,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getSHIFTPEnumLiteralDeclaration_5().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_5, grammarAccess.getLinkTypeAccess().getSHIFTPEnumLiteralDeclaration_5());
}
}
break;
case 7 :
// InternalPattern.g:745:3: (enumLiteral_6= '-{FlattenC}->' )
{
// InternalPattern.g:745:3: (enumLiteral_6= '-{FlattenC}->' )
// InternalPattern.g:746:4: enumLiteral_6= '-{FlattenC}->'
{
enumLiteral_6=(Token)match(input,31,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getFLATTENCEnumLiteralDeclaration_6().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_6, grammarAccess.getLinkTypeAccess().getFLATTENCEnumLiteralDeclaration_6());
}
}
break;
case 8 :
// InternalPattern.g:753:3: (enumLiteral_7= '-{ForkC}->' )
{
// InternalPattern.g:753:3: (enumLiteral_7= '-{ForkC}->' )
// InternalPattern.g:754:4: enumLiteral_7= '-{ForkC}->'
{
enumLiteral_7=(Token)match(input,32,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getFORKCEnumLiteralDeclaration_7().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_7, grammarAccess.getLinkTypeAccess().getFORKCEnumLiteralDeclaration_7());
}
}
break;
case 9 :
// InternalPattern.g:761:3: (enumLiteral_8= '-{ForkP}->' )
{
// InternalPattern.g:761:3: (enumLiteral_8= '-{ForkP}->' )
// InternalPattern.g:762:4: enumLiteral_8= '-{ForkP}->'
{
enumLiteral_8=(Token)match(input,33,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getFORKPEnumLiteralDeclaration_8().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_8, grammarAccess.getLinkTypeAccess().getFORKPEnumLiteralDeclaration_8());
}
}
break;
case 10 :
// InternalPattern.g:769:3: (enumLiteral_9= '-{JoinC}->' )
{
// InternalPattern.g:769:3: (enumLiteral_9= '-{JoinC}->' )
// InternalPattern.g:770:4: enumLiteral_9= '-{JoinC}->'
{
enumLiteral_9=(Token)match(input,34,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getJOINCEnumLiteralDeclaration_9().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_9, grammarAccess.getLinkTypeAccess().getJOINCEnumLiteralDeclaration_9());
}
}
break;
case 11 :
// InternalPattern.g:777:3: (enumLiteral_10= '-{JoinP}->' )
{
// InternalPattern.g:777:3: (enumLiteral_10= '-{JoinP}->' )
// InternalPattern.g:778:4: enumLiteral_10= '-{JoinP}->'
{
enumLiteral_10=(Token)match(input,35,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getJOINPEnumLiteralDeclaration_10().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_10, grammarAccess.getLinkTypeAccess().getJOINPEnumLiteralDeclaration_10());
}
}
break;
case 12 :
// InternalPattern.g:785:3: (enumLiteral_11= '-{Last}->' )
{
// InternalPattern.g:785:3: (enumLiteral_11= '-{Last}->' )
// InternalPattern.g:786:4: enumLiteral_11= '-{Last}->'
{
enumLiteral_11=(Token)match(input,36,FOLLOW_2);
current = grammarAccess.getLinkTypeAccess().getLASTEnumLiteralDeclaration_11().getEnumLiteral().getInstance();
newLeafNode(enumLiteral_11, grammarAccess.getLinkTypeAccess().getLASTEnumLiteralDeclaration_11());
}
@@ -1844,7 +2012,7 @@ public class InternalPatternParser extends AbstractInternalAntlrParser {
public static final BitSet FOLLOW_12 = new BitSet(new long[]{0x0000000001E04000L});
public static final BitSet FOLLOW_13 = new BitSet(new long[]{0x0000000001E00000L});
public static final BitSet FOLLOW_14 = new BitSet(new long[]{0x0000000000040002L});
public static final BitSet FOLLOW_15 = new BitSet(new long[]{0x000000003E000000L});
public static final BitSet FOLLOW_15 = new BitSet(new long[]{0x0000001FFE000000L});
public static final BitSet FOLLOW_16 = new BitSet(new long[]{0x0000000000020010L});
public static final BitSet FOLLOW_17 = new BitSet(new long[]{0x0000000000100002L});
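In this hunk the FOLLOW_15 constant widens from the five original link-type tokens (25..29) to the full set of twelve (25..36). The self-contained sketch below (not from the commit; the class name Follow15Mask is hypothetical) shows where the two hexadecimal constants come from.

public class Follow15Mask {
    // Build a mask with one bit per token type, the way ANTLR's BitSet packs
    // token types into longs.
    static long mask(int lowToken, int highToken) {
        long m = 0L;
        for (int t = lowToken; t <= highToken; t++) {
            m |= 1L << t;
        }
        return m;
    }

    public static void main(String[] args) {
        System.out.printf("tokens 25..29: 0x%016X%n", mask(25, 29)); // 0x000000003E000000
        System.out.printf("tokens 25..36: 0x%016X%n", mask(25, 36)); // 0x0000001FFE000000
    }
}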

View File

@@ -3,18 +3,6 @@
*/
package com.irtsaintexupery.xtext.pseim.serializer;
import java.util.Set;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.xtext.Action;
import org.eclipse.xtext.Parameter;
import org.eclipse.xtext.ParserRule;
import org.eclipse.xtext.serializer.ISerializationContext;
import org.eclipse.xtext.serializer.acceptor.SequenceFeeder;
import org.eclipse.xtext.serializer.sequencer.AbstractDelegatingSemanticSequencer;
import org.eclipse.xtext.serializer.sequencer.ITransientValueService.ValueTransient;
import com.google.inject.Inject;
import com.irtsaintexupery.pseim.pseim.CardinalityElement;
import com.irtsaintexupery.pseim.pseim.Pattern;
@@ -24,6 +12,16 @@ import com.irtsaintexupery.pseim.pseim.PatternPort;
import com.irtsaintexupery.pseim.pseim.PseimPackage;
import com.irtsaintexupery.pseim.seim.SeimPackage;
import com.irtsaintexupery.xtext.pseim.services.PatternGrammarAccess;
import java.util.Set;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.xtext.Action;
import org.eclipse.xtext.Parameter;
import org.eclipse.xtext.ParserRule;
import org.eclipse.xtext.serializer.ISerializationContext;
import org.eclipse.xtext.serializer.acceptor.SequenceFeeder;
import org.eclipse.xtext.serializer.sequencer.AbstractDelegatingSemanticSequencer;
import org.eclipse.xtext.serializer.sequencer.ITransientValueService.ValueTransient;
@SuppressWarnings("all")
public class PatternSemanticSequencer extends AbstractDelegatingSemanticSequencer {

View File

@@ -453,15 +453,32 @@ public class PatternGrammarAccess extends AbstractGrammarElementFinder {
private final Keyword cFIRSTFirstKeyword_2_0 = (Keyword)cFIRSTEnumLiteralDeclaration_2.eContents().get(0);
private final EnumLiteralDeclaration cROTATEEnumLiteralDeclaration_3 = (EnumLiteralDeclaration)cAlternatives.eContents().get(3);
private final Keyword cROTATERotateKeyword_3_0 = (Keyword)cROTATEEnumLiteralDeclaration_3.eContents().get(0);
private final EnumLiteralDeclaration cSHIFTEnumLiteralDeclaration_4 = (EnumLiteralDeclaration)cAlternatives.eContents().get(4);
private final Keyword cSHIFTShiftKeyword_4_0 = (Keyword)cSHIFTEnumLiteralDeclaration_4.eContents().get(0);
private final EnumLiteralDeclaration cSHIFTCEnumLiteralDeclaration_4 = (EnumLiteralDeclaration)cAlternatives.eContents().get(4);
private final Keyword cSHIFTCShiftCKeyword_4_0 = (Keyword)cSHIFTCEnumLiteralDeclaration_4.eContents().get(0);
private final EnumLiteralDeclaration cSHIFTPEnumLiteralDeclaration_5 = (EnumLiteralDeclaration)cAlternatives.eContents().get(5);
private final Keyword cSHIFTPShiftPKeyword_5_0 = (Keyword)cSHIFTPEnumLiteralDeclaration_5.eContents().get(0);
private final EnumLiteralDeclaration cFLATTENCEnumLiteralDeclaration_6 = (EnumLiteralDeclaration)cAlternatives.eContents().get(6);
private final Keyword cFLATTENCFlattenCKeyword_6_0 = (Keyword)cFLATTENCEnumLiteralDeclaration_6.eContents().get(0);
private final EnumLiteralDeclaration cFORKCEnumLiteralDeclaration_7 = (EnumLiteralDeclaration)cAlternatives.eContents().get(7);
private final Keyword cFORKCForkCKeyword_7_0 = (Keyword)cFORKCEnumLiteralDeclaration_7.eContents().get(0);
private final EnumLiteralDeclaration cFORKPEnumLiteralDeclaration_8 = (EnumLiteralDeclaration)cAlternatives.eContents().get(8);
private final Keyword cFORKPForkPKeyword_8_0 = (Keyword)cFORKPEnumLiteralDeclaration_8.eContents().get(0);
private final EnumLiteralDeclaration cJOINCEnumLiteralDeclaration_9 = (EnumLiteralDeclaration)cAlternatives.eContents().get(9);
private final Keyword cJOINCJoinCKeyword_9_0 = (Keyword)cJOINCEnumLiteralDeclaration_9.eContents().get(0);
private final EnumLiteralDeclaration cJOINPEnumLiteralDeclaration_10 = (EnumLiteralDeclaration)cAlternatives.eContents().get(10);
private final Keyword cJOINPJoinPKeyword_10_0 = (Keyword)cJOINPEnumLiteralDeclaration_10.eContents().get(0);
private final EnumLiteralDeclaration cLASTEnumLiteralDeclaration_11 = (EnumLiteralDeclaration)cAlternatives.eContents().get(11);
private final Keyword cLASTLastKeyword_11_0 = (Keyword)cLASTEnumLiteralDeclaration_11.eContents().get(0);
//enum LinkType:
// TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->' |
// SHIFT='-{Shift}->';
// TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->'
// | SHIFTC='-{ShiftC}->' | SHIFTP='-{ShiftP}->' | FLATTENC='-{FlattenC}->' | FORKC='-{ForkC}->' | FORKP='-{ForkP}->'
// | JOINC='-{JoinC}->' | JOINP='-{JoinP}->' | LAST='-{Last}->';
public EnumRule getRule() { return rule; }
//TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->' | SHIFT='-{Shift}->'
//TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->' | SHIFTC='-{ShiftC}->'
//| SHIFTP='-{ShiftP}->' | FLATTENC='-{FlattenC}->' | FORKC='-{ForkC}->' | FORKP='-{ForkP}->' | JOINC='-{JoinC}->' |
//JOINP='-{JoinP}->' | LAST='-{Last}->'
public Alternatives getAlternatives() { return cAlternatives; }
//TRANSPOSE='-{Transpose}->'
@@ -488,11 +505,53 @@ public class PatternGrammarAccess extends AbstractGrammarElementFinder {
//'-{Rotate}->'
public Keyword getROTATERotateKeyword_3_0() { return cROTATERotateKeyword_3_0; }
//SHIFT='-{Shift}->'
public EnumLiteralDeclaration getSHIFTEnumLiteralDeclaration_4() { return cSHIFTEnumLiteralDeclaration_4; }
//SHIFTC='-{ShiftC}->'
public EnumLiteralDeclaration getSHIFTCEnumLiteralDeclaration_4() { return cSHIFTCEnumLiteralDeclaration_4; }
//'-{Shift}->'
public Keyword getSHIFTShiftKeyword_4_0() { return cSHIFTShiftKeyword_4_0; }
//'-{ShiftC}->'
public Keyword getSHIFTCShiftCKeyword_4_0() { return cSHIFTCShiftCKeyword_4_0; }
//SHIFTP='-{ShiftP}->'
public EnumLiteralDeclaration getSHIFTPEnumLiteralDeclaration_5() { return cSHIFTPEnumLiteralDeclaration_5; }
//'-{ShiftP}->'
public Keyword getSHIFTPShiftPKeyword_5_0() { return cSHIFTPShiftPKeyword_5_0; }
//FLATTENC='-{FlattenC}->'
public EnumLiteralDeclaration getFLATTENCEnumLiteralDeclaration_6() { return cFLATTENCEnumLiteralDeclaration_6; }
//'-{FlattenC}->'
public Keyword getFLATTENCFlattenCKeyword_6_0() { return cFLATTENCFlattenCKeyword_6_0; }
//FORKC='-{ForkC}->'
public EnumLiteralDeclaration getFORKCEnumLiteralDeclaration_7() { return cFORKCEnumLiteralDeclaration_7; }
//'-{ForkC}->'
public Keyword getFORKCForkCKeyword_7_0() { return cFORKCForkCKeyword_7_0; }
//FORKP='-{ForkP}->'
public EnumLiteralDeclaration getFORKPEnumLiteralDeclaration_8() { return cFORKPEnumLiteralDeclaration_8; }
//'-{ForkP}->'
public Keyword getFORKPForkPKeyword_8_0() { return cFORKPForkPKeyword_8_0; }
//JOINC='-{JoinC}->'
public EnumLiteralDeclaration getJOINCEnumLiteralDeclaration_9() { return cJOINCEnumLiteralDeclaration_9; }
//'-{JoinC}->'
public Keyword getJOINCJoinCKeyword_9_0() { return cJOINCJoinCKeyword_9_0; }
//JOINP='-{JoinP}->'
public EnumLiteralDeclaration getJOINPEnumLiteralDeclaration_10() { return cJOINPEnumLiteralDeclaration_10; }
//'-{JoinP}->'
public Keyword getJOINPJoinPKeyword_10_0() { return cJOINPJoinPKeyword_10_0; }
//LAST='-{Last}->'
public EnumLiteralDeclaration getLASTEnumLiteralDeclaration_11() { return cLASTEnumLiteralDeclaration_11; }
//'-{Last}->'
public Keyword getLASTLastKeyword_11_0() { return cLASTLastKeyword_11_0; }
}
public class PortDirectionElements extends AbstractEnumRuleElementFinder {
private final EnumRule rule = (EnumRule) GrammarUtil.findRuleForName(getGrammar(), "com.irtsaintexupery.xtext.pseim.Pattern.PortDirection");
@@ -660,8 +719,9 @@ public class PatternGrammarAccess extends AbstractGrammarElementFinder {
}
//enum LinkType:
// TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->' |
// SHIFT='-{Shift}->';
// TRANSPOSE='-{Transpose}->' | IDENTITY='-{Identity}->' | FIRST='-{First}->' | ROTATE='-{Rotate}->'
// | SHIFTC='-{ShiftC}->' | SHIFTP='-{ShiftP}->' | FLATTENC='-{FlattenC}->' | FORKC='-{ForkC}->' | FORKP='-{ForkP}->'
// | JOINC='-{JoinC}->' | JOINP='-{JoinP}->' | LAST='-{Last}->';
public LinkTypeElements getLinkTypeAccess() {
return eLinkType;
}

View File

@@ -26,8 +26,8 @@ Workflow {
language = StandardLanguage {
name = "com.irtsaintexupery.xtext.pseim.Pattern"
fileExtensions = "pseimx"
referencedResource = "platform:/resource/com.irtsaintexupery.modelpattern/model/pseim.genmodel"
referencedResource = "platform:/resource/com.irtsaintexupery.modelpattern/model/seim.genmodel"
referencedResource = "platform:/resource/com.irtsaintexupery.pseim/model/pseim.genmodel"
referencedResource = "platform:/resource/com.irtsaintexupery.pseim/model/seim.genmodel"
fragment = ecore2xtext.Ecore2XtextValueConverterServiceFragment2 auto-inject {}

View File

@@ -38,7 +38,9 @@ PatternLink returns PatternLink:
;
enum LinkType returns LinkType:
TRANSPOSE = '-{Transpose}->' | IDENTITY = '-{Identity}->' | FIRST = '-{First}->' | ROTATE = '-{Rotate}->' | SHIFT = '-{Shift}->'
TRANSPOSE = '-{Transpose}->' | IDENTITY = '-{Identity}->' | FIRST = '-{First}->' | ROTATE = '-{Rotate}->'
| SHIFTC = '-{ShiftC}->' | SHIFTP = '-{ShiftP}->' | FLATTENC = '-{FlattenC}->' | FORKC = '-{ForkC}->' | FORKP = '-{ForkP}->'
| JOINC = '-{JoinC}->' | JOINP = '-{JoinP}->' | LAST = '-{Last}->'
;
enum PortDirection returns seim::PortDirection:
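As a closing illustration, here is a hypothetical sketch of what the extended enumeration looks like from the EMF side of the pseim metamodel. The factory and accessor names used here (PseimFactory, createPatternLink, setLinkType) and the literal constant LinkType.FORKC follow the usual EMF code-generation conventions but are not shown in this commit, so treat every identifier below other than the package name and the metamodel type names as an assumption.

import com.irtsaintexupery.pseim.pseim.LinkType;     // assumed: generated enum for the LinkType EEnum
import com.irtsaintexupery.pseim.pseim.PatternLink;
import com.irtsaintexupery.pseim.pseim.PseimFactory; // assumed: conventional EMF factory name

public class NewLinkTypeSketch {
    public static void main(String[] args) {
        // Hypothetical: create a pattern link and give it one of the new connection patterns.
        PatternLink link = PseimFactory.eINSTANCE.createPatternLink();
        link.setLinkType(LinkType.FORKC); // written '-{ForkC}->' in the textual syntax above
        System.out.println(link.getLinkType());
    }
}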