Skip to content

Commit

Permalink
Merge pull request #355 from Cryptonomic/507-parser
Browse files Browse the repository at this point in the history
Edo instruction parsing
  • Loading branch information
anonymoussprocket authored Feb 23, 2021
2 parents f8fa154 + c6e5f0e commit aff0412
Show file tree
Hide file tree
Showing 34 changed files with 843 additions and 97 deletions.
26 changes: 20 additions & 6 deletions grammar/tezos/Michelson.ne
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,9 @@ const lexer = moo.compile({
parameter: [ 'parameter' , 'Parameter'],
storage: ['Storage', 'storage'],
code: ['Code', 'code'],
comparableType: ['int', 'nat', 'string', 'bytes', 'mutez', 'bool', 'key_hash', 'timestamp', 'chain_id'],
constantType: ['key', 'unit', 'signature', 'operation', 'address'],
singleArgType: ['option', 'list', 'set', 'contract'],
comparableType: ['int', 'nat', 'string', 'bytes', 'mutez', 'bool', 'key_hash', 'timestamp', 'chain_id', /* Edo types*/ 'never'],
constantType: ['key', 'unit', 'signature', 'operation', 'address', /* Edo types */ 'bls12_381_fr', 'bls12_381_g1', 'bls12_381_g2'],
singleArgType: ['option', 'list', 'set', 'contract', 'ticket'],
doubleArgType: ['pair', 'or', 'lambda', 'map', 'big_map'],
baseInstruction: ['ABS', 'ADD', 'ADDRESS', 'AMOUNT', 'AND', 'BALANCE', 'BLAKE2B', 'CAR', 'CAST', 'CDR', 'CHECK_SIGNATURE',
'COMPARE', 'CONCAT', 'CONS', 'CONTRACT', /*'CREATE_CONTRACT',*/ 'DIP', /*'DROP',*/ /*'DUP',*/ 'EDIV', /*'EMPTY_MAP',*/
Expand All @@ -48,7 +48,9 @@ const lexer = moo.compile({
'IF_SOME', // TODO: macro
'IFCMPEQ', 'IFCMPNEQ', 'IFCMPLT', 'IFCMPGT', 'IFCMPLE', 'IFCMPGE', 'CMPEQ', 'CMPNEQ', 'CMPLT', 'CMPGT', 'CMPLE',
'CMPGE', 'IFEQ', 'NEQ', 'IFLT', 'IFGT', 'IFLE', 'IFGE', // TODO: should be separate
/*'DIG',*/ /*'DUG',*/ 'EMPTY_BIG_MAP', 'APPLY', 'CHAIN_ID'
/*'DIG',*/ /*'DUG',*/ 'EMPTY_BIG_MAP', 'APPLY', 'CHAIN_ID',
// Edo instructions
'KECCAK', 'SHA3', 'PAIRING_CHECK', 'SAPLING_EMPTY_STATE', 'SAPLING_VERIFY_UPDATE', 'GET_AND_UPDATE', 'NEVER', 'VOTING_POWER', 'TOTAL_VOTING_POWER', 'TICKET', 'READ_TICKET', 'SPLIT_TICKET', 'JOIN_TICKETS', 'SELF_ADDRESS', 'LEVEL'
],
macroCADR: macroCADRconst,
macroDIP: macroDIPconst,
Expand Down Expand Up @@ -94,6 +96,10 @@ type ->
| %lparen _ %constantType (_ %annot):+ _ %rparen {% comparableTypeToJson %}
| %lparen _ %singleArgType (_ %annot):+ _ type %rparen {% singleArgTypeKeywordWithParenToJson %}
| %lparen _ %doubleArgType (_ %annot):+ _ type _ type %rparen {% doubleArgTypeKeywordWithParenToJson %}
| %lparen _ "sapling_state" (_ %annot):+ _ %number %rparen {% saplingToJson %}
| %lparen _ "sapling_transaction" (_ %annot):+ _ %number %rparen {% saplingToJson %}
| %lparen _ "sapling_state" _ %number %rparen {% saplingToJson %}
| %lparen _ "sapling_transaction" _ %number %rparen {% saplingToJson %}

typeData ->
%singleArgType _ typeData {% singleArgKeywordToJson %}
Expand Down Expand Up @@ -494,7 +500,7 @@ semicolons -> [;]:?
/**
 * Converts a single-argument instruction keyword match (e.g. `DIP { ... }`)
 * into its Micheline JSON fragment.
 *
 * DIP-family keywords (per check_dip) are macro-expanded via expandDIP;
 * everything else is emitted as a prim with its argument wrapped in an array.
 *
 * @param d - Nearley match array: d[0] is the keyword token, d[2] the parsed argument.
 * @returns {string} Micheline JSON fragment.
 */
const singleArgInstrKeywordToJson = d => {
    const word = `${d[0].toString()}`;
    if (check_dip(word)) {
        // NOTE: the scraped diff showed this return duplicated (old/new line pair);
        // only one statement is kept here.
        return expandDIP(word, d[2]);
    } else {
        return `{ "prim": "${d[0]}", "args": [ [ ${d[2]} ] ] }`; /*TODO: [] double-wrapping here is Bad*/
    }
}
Expand Down Expand Up @@ -524,7 +530,7 @@ semicolons -> [;]:?
const doubleArgKeywordToJson = d => {
if (d.length === 7) {
/*
This handles the case where a blank {} for %subInstuction should be blank, but for %data they should be an empty array, see TODO about double-wrapping
This handles the case where a blank {} for %subInstruction should be blank, but for %data they should be an empty array, see TODO about double-wrapping
*/
return `{ "prim": "${d[0]}", "args": [ ${d[2]}, [] ] }`;
} else {
Expand Down Expand Up @@ -608,6 +614,14 @@ semicolons -> [;]:?
return `{ "prim": "PUSH", "args": [ ${d[3]}, ${d[5]} ], "annots": [${annot}] }`;
}

/**
 * Converts a sapling_state / sapling_transaction type match into Micheline JSON.
 *
 * Match shapes (see the `type ->` grammar rules):
 *   length 7: lparen _ keyword annot-list _ number rparen  — annotated form
 *   length 6: lparen _ keyword _ number rparen             — bare form
 *
 * @param d - Nearley match array; d[2] is the keyword token, the memo-size
 *            number is at d[5] (annotated) or d[4] (bare).
 * @returns {string} Micheline JSON fragment, e.g.
 *          { "prim": "sapling_state", "args": [ { "int": "8" } ] }
 */
const saplingToJson = d => {
    if (d.length === 7) { // annotated form: d[3] holds [_, annot] pairs
        const annot = d[3].map(x => `"${x[1]}"`);
        return `{ "prim": "${d[2]}", "args": [ { "int": "${d[5]}" } ], "annots": [${annot}] }`;
    } else {
        return `{ "prim": "${d[2]}", "args": [ { "int": "${d[4]}" } ] }`;
    }
}

/**
 * Converts a DIP n (or bare DIP) instruction match into Micheline JSON.
 *
 * @param d - Nearley match array; d[0] is the keyword, and either d[2] is a
 *            numeric depth with the instruction body at d[4] (long form), or
 *            d[2] is the body itself (short form).
 * @returns {string} Micheline JSON fragment.
 */
const dipnToJson = (d) => {
    if (d.length > 4) {
        // Long form: explicit depth argument plus an instruction sequence.
        const depth = d[2];
        const body = d[4];
        return `{ "prim": "${d[0]}", "args": [ { "int": "${depth}" }, [ ${body} ] ] }`;
    }
    // Short form: single argument, no depth.
    return `{ "prim": "${d[0]}", "args": [ ${d[2]} ] }`;
};

const dupnToJson = d => {
Expand Down
34 changes: 28 additions & 6 deletions src/chain/tezos/lexer/Michelson.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,9 @@ const lexer = moo.compile({
parameter: [ 'parameter' , 'Parameter'],
storage: ['Storage', 'storage'],
code: ['Code', 'code'],
comparableType: ['int', 'nat', 'string', 'bytes', 'mutez', 'bool', 'key_hash', 'timestamp', 'chain_id'],
constantType: ['key', 'unit', 'signature', 'operation', 'address'],
singleArgType: ['option', 'list', 'set', 'contract'],
comparableType: ['int', 'nat', 'string', 'bytes', 'mutez', 'bool', 'key_hash', 'timestamp', 'chain_id', /* Edo types*/ 'never'],
constantType: ['key', 'unit', 'signature', 'operation', 'address', /* Edo types */ 'bls12_381_fr', 'bls12_381_g1', 'bls12_381_g2'],
singleArgType: ['option', 'list', 'set', 'contract', 'ticket'],
doubleArgType: ['pair', 'or', 'lambda', 'map', 'big_map'],
baseInstruction: ['ABS', 'ADD', 'ADDRESS', 'AMOUNT', 'AND', 'BALANCE', 'BLAKE2B', 'CAR', 'CAST', 'CDR', 'CHECK_SIGNATURE',
'COMPARE', 'CONCAT', 'CONS', 'CONTRACT', /*'CREATE_CONTRACT',*/ 'DIP', /*'DROP',*/ /*'DUP',*/ 'EDIV', /*'EMPTY_MAP',*/
Expand All @@ -77,7 +77,9 @@ const lexer = moo.compile({
'IF_SOME', // TODO: macro
'IFCMPEQ', 'IFCMPNEQ', 'IFCMPLT', 'IFCMPGT', 'IFCMPLE', 'IFCMPGE', 'CMPEQ', 'CMPNEQ', 'CMPLT', 'CMPGT', 'CMPLE',
'CMPGE', 'IFEQ', 'NEQ', 'IFLT', 'IFGT', 'IFLE', 'IFGE', // TODO: should be separate
/*'DIG',*/ /*'DUG',*/ 'EMPTY_BIG_MAP', 'APPLY', 'CHAIN_ID'
/*'DIG',*/ /*'DUG',*/ 'EMPTY_BIG_MAP', 'APPLY', 'CHAIN_ID',
// Edo instructions
'KECCAK', 'SHA3', 'PAIRING_CHECK', 'SAPLING_EMPTY_STATE', 'SAPLING_VERIFY_UPDATE', 'GET_AND_UPDATE', 'NEVER', 'VOTING_POWER', 'TOTAL_VOTING_POWER', 'TICKET', 'READ_TICKET', 'SPLIT_TICKET', 'JOIN_TICKETS', 'SELF_ADDRESS', 'LEVEL'
],
macroCADR: macroCADRconst,
macroDIP: macroDIPconst,
Expand Down Expand Up @@ -393,7 +395,7 @@ const lexer = moo.compile({
/**
 * Converts a single-argument instruction keyword match (e.g. `DIP { ... }`)
 * into its Micheline JSON fragment.
 *
 * DIP-family keywords (per check_dip) are macro-expanded via expandDIP;
 * everything else is emitted as a prim with its argument wrapped in an array.
 *
 * @param d - Nearley match array: d[0] is the keyword token, d[2] the parsed argument.
 * @returns {string} Micheline JSON fragment.
 */
const singleArgInstrKeywordToJson = d => {
    const word = `${d[0].toString()}`;
    if (check_dip(word)) {
        // NOTE: the scraped diff showed this return duplicated (old/new line pair);
        // only one statement is kept here.
        return expandDIP(word, d[2]);
    } else {
        return `{ "prim": "${d[0]}", "args": [ [ ${d[2]} ] ] }`; /*TODO: [] double-wrapping here is Bad*/
    }
}
Expand Down Expand Up @@ -423,7 +425,7 @@ const lexer = moo.compile({
const doubleArgKeywordToJson = d => {
if (d.length === 7) {
/*
This handles the case where a blank {} for %subInstuction should be blank, but for %data they should be an empty array, see TODO about double-wrapping
This handles the case where a blank {} for %subInstruction should be blank, but for %data they should be an empty array, see TODO about double-wrapping
*/
return `{ "prim": "${d[0]}", "args": [ ${d[2]}, [] ] }`;
} else {
Expand Down Expand Up @@ -507,6 +509,14 @@ const lexer = moo.compile({
return `{ "prim": "PUSH", "args": [ ${d[3]}, ${d[5]} ], "annots": [${annot}] }`;
}

/**
 * Converts a sapling_state / sapling_transaction type match into Micheline JSON.
 *
 * Match shapes (see the `type` grammar rules):
 *   length 7: lparen _ keyword annot-list _ number rparen  — annotated form
 *   length 6: lparen _ keyword _ number rparen             — bare form
 *
 * @param d - Nearley match array; d[2] is the keyword token, the memo-size
 *            number is at d[5] (annotated) or d[4] (bare).
 * @returns {string} Micheline JSON fragment, e.g.
 *          { "prim": "sapling_state", "args": [ { "int": "8" } ] }
 */
const saplingToJson = d => {
    if (d.length === 7) { // annotated form: d[3] holds [_, annot] pairs
        const annot = d[3].map(x => `"${x[1]}"`);
        return `{ "prim": "${d[2]}", "args": [ { "int": "${d[5]}" } ], "annots": [${annot}] }`;
    } else {
        return `{ "prim": "${d[2]}", "args": [ { "int": "${d[4]}" } ] }`;
    }
}

/**
 * Converts a DIP n (or bare DIP) instruction match into Micheline JSON.
 *
 * @param d - Nearley match array; d[0] is the keyword, and either d[2] is a
 *            numeric depth with the instruction body at d[4] (long form), or
 *            d[2] is the body itself (short form).
 * @returns {string} Micheline JSON fragment.
 */
const dipnToJson = (d) => {
    if (d.length > 4) {
        // Long form: explicit depth argument plus an instruction sequence.
        const depth = d[2];
        const body = d[4];
        return `{ "prim": "${d[0]}", "args": [ { "int": "${depth}" }, [ ${body} ] ] }`;
    }
    // Short form: single argument, no depth.
    return `{ "prim": "${d[0]}", "args": [ ${d[2]} ] }`;
};

const dupnToJson = d => {
Expand Down Expand Up @@ -616,6 +626,18 @@ const grammar: Grammar = {
{"name": "type$ebnf$6$subexpression$2", "symbols": ["_", (lexer.has("annot") ? {type: "annot"} : annot)]},
{"name": "type$ebnf$6", "symbols": ["type$ebnf$6", "type$ebnf$6$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]])},
{"name": "type", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", (lexer.has("doubleArgType") ? {type: "doubleArgType"} : doubleArgType), "type$ebnf$6", "_", "type", "_", "type", (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": doubleArgTypeKeywordWithParenToJson},
{"name": "type$ebnf$7$subexpression$1", "symbols": ["_", (lexer.has("annot") ? {type: "annot"} : annot)]},
{"name": "type$ebnf$7", "symbols": ["type$ebnf$7$subexpression$1"]},
{"name": "type$ebnf$7$subexpression$2", "symbols": ["_", (lexer.has("annot") ? {type: "annot"} : annot)]},
{"name": "type$ebnf$7", "symbols": ["type$ebnf$7", "type$ebnf$7$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]])},
{"name": "type", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", {"literal":"sapling_state"}, "type$ebnf$7", "_", (lexer.has("number") ? {type: "number"} : number), (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": saplingToJson},
{"name": "type$ebnf$8$subexpression$1", "symbols": ["_", (lexer.has("annot") ? {type: "annot"} : annot)]},
{"name": "type$ebnf$8", "symbols": ["type$ebnf$8$subexpression$1"]},
{"name": "type$ebnf$8$subexpression$2", "symbols": ["_", (lexer.has("annot") ? {type: "annot"} : annot)]},
{"name": "type$ebnf$8", "symbols": ["type$ebnf$8", "type$ebnf$8$subexpression$2"], "postprocess": (d) => d[0].concat([d[1]])},
{"name": "type", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", {"literal":"sapling_transaction"}, "type$ebnf$8", "_", (lexer.has("number") ? {type: "number"} : number), (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": saplingToJson},
{"name": "type", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", {"literal":"sapling_state"}, "_", (lexer.has("number") ? {type: "number"} : number), (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": saplingToJson},
{"name": "type", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", {"literal":"sapling_transaction"}, "_", (lexer.has("number") ? {type: "number"} : number), (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": saplingToJson},
{"name": "typeData", "symbols": [(lexer.has("singleArgType") ? {type: "singleArgType"} : singleArgType), "_", "typeData"], "postprocess": singleArgKeywordToJson},
{"name": "typeData", "symbols": [(lexer.has("lparen") ? {type: "lparen"} : lparen), "_", (lexer.has("singleArgType") ? {type: "singleArgType"} : singleArgType), "_", "typeData", "_", (lexer.has("rparen") ? {type: "rparen"} : rparen)], "postprocess": singleArgKeywordWithParenToJson},
{"name": "typeData", "symbols": [(lexer.has("doubleArgType") ? {type: "doubleArgType"} : doubleArgType), "_", "typeData", "_", "typeData"], "postprocess": doubleArgKeywordToJson},
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
0000025a0200000255050107650765066303590000000c25636865636b526573756c7404820000000325667207650480000000032567310765048100000003256732066303820000000a256d756c526573756c7405000764076408650482000000032566720765048000000003256731048100000003256732000000042561646408650382038200000004256d756c0764046c00000007256e65676174650764065f0765038003810000000e2570616972696e675f636865636b046c0000000625746f496e740502020000019103210317034c0316072e02000000b4072e0200000078034c03210316034c031703210317034c0316057000030321057100040317031603120342034c034203210316034c031703210316034c031703210317034c0316057000040321057100050317031703120342034c0342034c034203210317034c031603210316034c03170570000303160312034c034203420200000030034c03210316034c031703210316034c031703160570000303210317034c0316033a0346034c0342034c0342034c034202000000c3072e020000001e0320032103210316034c031703170570000203170316033b0342034c03420200000099072e0200000018034c03210317034c0316031705700002037f0346034203420200000075032003210316031703300743035b008080808080808080808080808080808080808080808080808080808080aebfbbd5fc95da0903190325072c020000000002000000310743036801000000264661696c656420746f2063617374206669656c6420656c656d656e7420467220746f20496e740327053d036d0342
0000025a020000025505000764076408650482000000032566720765048000000003256731048100000003256732000000042561646408650382038200000004256d756c0764046c00000007256e65676174650764065f0765038003810000000e2570616972696e675f636865636b046c0000000625746f496e74050107650765066303590000000c25636865636b526573756c7404820000000325667207650480000000032567310765048100000003256732066303820000000a256d756c526573756c740502020000019103210317034c0316072e02000000b4072e0200000078034c03210316034c031703210317034c0316057000030321057100040317031603120342034c034203210316034c031703210316034c031703210317034c0316057000040321057100050317031703120342034c0342034c034203210317034c031603210316034c03170570000303160312034c034203420200000030034c03210316034c031703210316034c031703160570000303210317034c0316033a0346034c0342034c0342034c034202000000c3072e020000001e0320032103210316034c031703170570000203170316033b0342034c03420200000099072e0200000018034c03210317034c0316031705700002037f0346034203420200000075032003210316031703300743035b008080808080808080808080808080808080808080808080808080808080aebfbbd5fc95da0903190325072c020000000002000000310743036801000000264661696c656420746f2063617374206669656c6420656c656d656e7420467220746f20496e740327053d036d0342
Original file line number Diff line number Diff line change
@@ -1,25 +1,4 @@
[
{
"prim": "storage",
"args": [
{
"prim": "pair",
"args": [
{ "prim": "pair", "args": [ { "prim": "option", "args": [ { "prim": "bool" } ], "annots": [ "%checkResult" ] }, { "prim": "bls12_381_fr", "annots": [ "%fr" ] } ] },
{
"prim": "pair",
"args": [
{ "prim": "bls12_381_g1", "annots": [ "%g1" ] },
{
"prim": "pair",
"args": [ { "prim": "bls12_381_g2", "annots": [ "%g2" ] }, { "prim": "option", "args": [ { "prim": "bls12_381_fr" } ], "annots": [ "%mulResult" ] } ]
}
]
}
]
}
]
},
{
"prim": "parameter",
"args": [
Expand Down Expand Up @@ -57,6 +36,27 @@
}
]
},
{
"prim": "storage",
"args": [
{
"prim": "pair",
"args": [
{ "prim": "pair", "args": [ { "prim": "option", "args": [ { "prim": "bool" } ], "annots": [ "%checkResult" ] }, { "prim": "bls12_381_fr", "annots": [ "%fr" ] } ] },
{
"prim": "pair",
"args": [
{ "prim": "bls12_381_g1", "annots": [ "%g1" ] },
{
"prim": "pair",
"args": [ { "prim": "bls12_381_g2", "annots": [ "%g2" ] }, { "prim": "option", "args": [ { "prim": "bls12_381_fr" } ], "annots": [ "%mulResult" ] } ]
}
]
}
]
}
]
},
{
"prim": "code",
"args": [
Expand Down Expand Up @@ -224,4 +224,4 @@
]
]
}
]
]
1 change: 1 addition & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/hashes.hex
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
00000052020000004d0500036905010765046900000007256b656363616b04690000000525736861330502020000002603210316034c03170317034c032105710002037d0342034c037e034c03160342053d036d0342
28 changes: 28 additions & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/hashes.micheline
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
[
{ "prim": "parameter", "args": [ { "prim": "bytes" } ] },
{ "prim": "storage", "args": [ { "prim": "pair", "args": [ { "prim": "bytes", "annots": [ "%keccak" ] }, { "prim": "bytes", "annots": [ "%sha3" ] } ] } ] },
{
"prim": "code",
"args": [
[
{ "prim": "DUP" },
{ "prim": "CAR" },
{ "prim": "SWAP" },
{ "prim": "CDR" },
{ "prim": "CDR" },
{ "prim": "SWAP" },
{ "prim": "DUP" },
{ "prim": "DUG", "args": [ { "int": "2" } ] },
{ "prim": "KECCAK" },
{ "prim": "PAIR" },
{ "prim": "SWAP" },
{ "prim": "SHA3" },
{ "prim": "SWAP" },
{ "prim": "CAR" },
{ "prim": "PAIR" },
{ "prim": "NIL", "args": [ { "prim": "operation" } ] },
{ "prim": "PAIR" }
]
]
}
]
25 changes: 25 additions & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/hashes.michelson
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
parameter bytes;
storage (pair (bytes %keccak) (bytes %sha3));
code
{
DUP; # pair @parameter @storage : pair @parameter @storage
CAR; # @parameter : pair @parameter @storage
SWAP; # pair @parameter @storage : @parameter
CDR; # @storage : @parameter
# == new_value ==
# self.data.keccak = sp.keccak(params) # @storage : @parameter
CDR; # bytes : @parameter
SWAP; # @parameter : bytes
DUP; # @parameter : @parameter : bytes
DUG 2; # @parameter : bytes : @parameter
KECCAK; # bytes : bytes : @parameter
PAIR; # pair bytes bytes : @parameter
SWAP; # @parameter : pair bytes bytes
# self.data.sha3 = sp.sha3(params) # @parameter : pair bytes bytes
SHA3; # bytes : pair bytes bytes
SWAP; # pair bytes bytes : bytes
CAR; # bytes : bytes
PAIR; # pair bytes bytes
NIL operation; # list operation : pair bytes bytes
PAIR; # pair (list operation) (pair bytes bytes)
};
1 change: 1 addition & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/level.hex
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
0000001e02000000190500036c050103620502020000000a03200376053d036d0342
5 changes: 5 additions & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/level.micheline
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
[
{ "prim": "parameter", "args": [ { "prim": "unit" } ] },
{ "prim": "storage", "args": [ { "prim": "nat" } ] },
{ "prim": "code", "args": [ [ { "prim": "DROP" }, { "prim": "LEVEL" }, { "prim": "NIL", "args": [ { "prim": "operation" } ] }, { "prim": "PAIR" } ] ] }
]
11 changes: 11 additions & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/level.michelson
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
parameter unit;
storage nat;
code
{
DROP; #
# == entry_point_1 ==
# self.data.level = sp.level #
LEVEL; # nat
NIL operation; # list operation : nat
PAIR; # pair (list operation) nat
};
1 change: 1 addition & 0 deletions test/chain/tezos/lexer/samples/p008-opcodes/maps.hex
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
0000033502000003300500076407640764046c0000001425746573745f6765745f616e645f7570646174650860035b03680000000d25746573745f6d61705f67657407640860035b03680000000e25746573745f6d61705f676574320860035b03680000001c25746573745f6d61705f6765745f64656661756c745f76616c756573076407640860035b03680000001b25746573745f6d61705f6765745f6d697373696e675f76616c75650860035b03680000001c25746573745f6d61705f6765745f6d697373696e675f76616c75653207640860035b03680000001125746573745f6d61705f6765745f6f7074046c0000001025746573745f7570646174655f6d61700501076507650860035b036800000002256d04680000000225780765066303680000000225790663036800000002257a050202000001fe03210317034c0316072e0200000102072e0200000082072e0200000044034c03210571000203160316074305630368050901000000036f6e650743035b0001038c034c0320034c0320034c03210316034c0317031605700002034c0342034c03420200000032034c03210317034c03160316057000020743035b000c0329072f02000000080743035b000c03270200000000034c034203420200000074072e0200000032034c03210317034c03160316057000020743035b000c0329072f02000000080743035b001103270200000000034c034203420200000036034c03210317034c03160316057000020743035b000c0329072f020000000c0743036801000000036162630200000000034c0342034202000000e2072e020000007e072e020000003f034c03210317034c03160316057000020743035b000c0329072f020000001507430368010000000a6d697373696e6720313203270200000000034c034203420200000033034c03210317034c03160316057000020743035b000c0329072f02000000090743035b00921303270200000000034c034203420200000058072e020000001e034c03210316034c03170317057000020743035b000c03290342034c0342020000002e032003210317034c031603210317034c0316074305630368050901000000036f6e650743035b0001035003420342053d036d0342
Loading

0 comments on commit aff0412

Please sign in to comment.