/*
* Reference ETL Parser for Java
* Copyright (c) 2000-2009 Constantine A Plotnikov
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.sf.etl.parsers.internal.term_parser.compiler;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Set;
import net.sf.etl.parsers.LexerStates;
import net.sf.etl.parsers.ObjectName;
import net.sf.etl.parsers.PropertyName;
import net.sf.etl.parsers.SyntaxRole;
import net.sf.etl.parsers.TermContext;
import net.sf.etl.parsers.Terms;
import net.sf.etl.parsers.TextPos;
import net.sf.etl.parsers.TokenKey;
import net.sf.etl.parsers.internal.lexer.DefaultLexer;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.AlternateStartSequence;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.BlockNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.CallNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.ChoiceNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.CommitMarkNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.ErrorNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.FallbackObjectNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.FirstChoiceNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.GroupNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.MarkedNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.Node;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.ObjectNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.PropertyNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.RepeatNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.ScopeNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.SegmentNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.SequenceNode;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.TermContextScope;
import net.sf.etl.parsers.internal.term_parser.compiler.nodes.TokenNode;
import net.sf.etl.parsers.internal.term_parser.flattened.DefinitionView;
import net.sf.etl.parsers.internal.term_parser.flattened.GrammarView;
import net.sf.etl.parsers.internal.term_parser.flattened.WrapperLink;
import net.sf.etl.parsers.internal.term_parser.grammar.Element;
import net.sf.etl.parsers.internal.term_parser.grammar.Wrapper;
import net.sf.etl.parsers.internal.term_parser.states.ActivationFactory;
import net.sf.etl.parsers.internal.term_parser.states.ActivationFactoryImpl;
import net.sf.etl.parsers.internal.term_parser.states.LookAheadSet;
import net.sf.etl.parsers.internal.term_parser.states.ReturnState;
import net.sf.etl.parsers.internal.term_parser.states.State;
/**
* This utility class is used to build state machines. Its methods are expressed
* in terms of an LL(1) grammar; however, the class has a strong dependency on
* the grammar AST and compiler classes.
*
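* A minimal usage sketch (illustrative only; the variables below are
* placeholders, not actual API constants). Start/end calls must be strictly
* nested, mirroring the node tree they build:
*
* <pre>{@code
* builder.startDefinition(view);       // view: a DefinitionView, used for error context
* builder.startObject(objectName);     // pushes an ObjectNode on the node stack
* builder.startProperty(propertyName, false);
* builder.anyToken(kind, role);        // leaf node: added directly, no push/pop
* builder.endProperty();               // pops and attaches to the enclosing object
* builder.endObject();                 // final pop: the node tree is complete
* builder.endDefinition();
* builder.buildStateMachine();         // compiles the node tree into states
* }</pre>
*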
* @author const
*
*/
public class StateMachineBuilder {
/** stack of nodes */
private final ArrayList<Node> stack = new ArrayList<Node>();
/** top node */
private Node topNode;
/** node to return */
private Node returnNode;
/** a stack of definitions */
private final ArrayList<DefinitionView> definitionStack = new ArrayList<DefinitionView>();
/** the context builder; it is used for error reporting */
private final ContextBuilder contextBuilder;
/** an activation factory that is being created by this builder */
private final ActivationFactoryImpl targetFactory;
/**
* A constructor for the builder
*
* @param contextBuilder
* a parent builder
* @param targetFactory
* a factory that is being built
*/
public StateMachineBuilder(ContextBuilder contextBuilder,
ActivationFactoryImpl targetFactory) {
super();
this.contextBuilder = contextBuilder;
this.targetFactory = targetFactory;
}
/**
* finish compiling
*
* @return last node
*/
public Node finish() {
return returnNode;
}
/**
* start node
*
* @param n
* node to start
*/
private void startNode(Node n) {
setNodeContext(n);
stack.add(n);
topNode = n;
}
/**
* Supply context information to node
*
* @param n
* a node to update
*/
private void setNodeContext(Node n) {
n.setBuilder(this);
if (!definitionStack.isEmpty()) {
n.setDefinition(topDefinition());
}
}
/**
* End node
*
* @param <T>
* node type
*
* @param cls
* class of node to end
* @return the node whose construction has just been finished
*/
private <T extends Node> T endNode(Class<T> cls) {
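// pop the node under construction; if the stack becomes empty, the popped
// node is the root of the tree and becomes the result, otherwise it is
// attached to its parent node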
final T ct = cls.cast(stack.remove(stack.size() - 1));
if (stack.isEmpty()) {
assert returnNode == null : "there already is return node "
+ returnNode;
returnNode = ct;
} else {
topNode = stack.get(stack.size() - 1);
processNode(ct);
}
return ct;
}
/**
* Helper method that adds ct to current top node
*
* @param ct
* node to add
*/
private void processNode(Node ct) {
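// attach the finished child either to the current group node or to the
// group node wrapped by the current scope node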
if (topNode instanceof GroupNode) {
final GroupNode sn = (GroupNode) topNode;
sn.nodes().add(ct);
} else if (topNode instanceof ScopeNode) {
final GroupNode sn = (GroupNode) ((ScopeNode) topNode).innerNode();
sn.nodes().add(ct);
} else {
assert false : "unknown top node " + topNode;
}
}
/**
* Add a single node to the current context
*
* @param n
* node to write
*/
private void singleNode(Node n) {
setNodeContext(n);
if (topNode == null) {
assert returnNode == null : "there already is return node";
returnNode = n;
} else {
processNode(n);
}
}
/**
* start first choice node
*/
public void startFirstChoice() {
startNode(new FirstChoiceNode());
}
/**
* end first choice node
*
* @return the just closed node
*/
public FirstChoiceNode endFirstChoice() {
return endNode(FirstChoiceNode.class);
}
/**
* start choice node
*/
public void startChoice() {
startNode(new ChoiceNode());
}
/**
* end choice node
*/
public void endChoice() {
endNode(ChoiceNode.class);
}
/**
* start repeat node
*/
public void startRepeat() {
startNode(new RepeatNode());
}
/**
* end repeat node
*/
public void endRepeat() {
endNode(RepeatNode.class);
}
/**
* start block node
*
* @param context
* a context of the block
*/
public void startBlock(TermContext context) {
startNode(new BlockNode(context));
}
/**
* end block node
*/
public void endBlock() {
endNode(BlockNode.class);
}
/**
* Create call node
*
* @param builder
* a builder for factory associated with the node
*/
public void call(StateMachineBuilder builder) {
singleNode(new CallNode(builder));
}
/**
* start object node
*
* @param name
* a name of object
*/
public void startObject(ObjectName name) {
startNode(new ObjectNode(name, false, null));
}
/**
* end object node
*
* @return the object node that has been created
*/
public ObjectNode endObject() {
return endNode(ObjectNode.class);
}
/**
* start property node
*
* @param name
* a name of property
* @param isList
* flag indicating if it is a list property
*/
public void startProperty(PropertyName name, boolean isList) {
startNode(new PropertyNode(name, isList, false));
}
/**
* start property node at mark
*
* @param name
* a name of property
* @param isList
* flag indicating if it is a list property
*/
public void startPropertyAtMark(PropertyName name, boolean isList) {
startNode(new PropertyNode(name, isList, true));
}
/**
* end property node
*/
public void endProperty() {
endNode(PropertyNode.class);
}
/**
* start sequence node
*/
public void startSequence() {
startNode(new SequenceNode());
}
/**
* end sequence node
*/
public void endSequence() {
endNode(SequenceNode.class);
}
/**
* start expression node
*
* @param context
* a context for the node
*/
public void startExpression(TermContext context) {
startNode(new TermContextScope(context, Terms.EXPRESSION_START,
Terms.EXPRESSION_END));
}
/**
* end expression node
*/
public void endExpression() {
endNode(TermContextScope.class);
}
/**
* start attributes node
*
* @param context
* a context for the node
*/
public void startAttributes(TermContext context) {
startNode(new TermContextScope(context, Terms.ATTRIBUTES_START,
Terms.ATTRIBUTES_END));
}
/**
* end attributes node
*/
public void endAttributes() {
endNode(TermContextScope.class);
}
/**
* start documentation comment node
*
* @param context
* a context for the node
*/
public void startDocComent(TermContext context) {
startNode(new TermContextScope(context, Terms.DOC_COMMENT_START,
Terms.DOC_COMMENT_END));
}
/**
* end documentation comment node
*/
public void endDocComent() {
endNode(TermContextScope.class);
}
/**
* start modifiers node
*/
public void startModifiers() {
startNode(new TermContextScope(Terms.MODIFIERS_START,
Terms.MODIFIERS_END));
}
/**
* end modifiers node
*/
public void endModifiers() {
endNode(TermContextScope.class);
}
/**
* start segment node
*
* @param context
* a context for the node
*/
public void startSegment(TermContext context) {
startNode(new SegmentNode(context));
}
/**
* end segment node
*/
public void endSegment() {
endNode(SegmentNode.class);
}
/**
* Create node that matches specified text
*
* @param kind
* a term kind of node
* @param role
* a role of the node
* @param text
* a text of the node
*
*/
public void tokenText(Terms kind, SyntaxRole role, String text) {
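// lex the literal text in order to determine its token key; only simple
// single-token kinds are accepted here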
final DefaultLexer lexer = new DefaultLexer();
lexer.reparse("local:", new StringReader(text), TextPos.START,
LexerStates.DEFAULT_STATE);
lexer.advance();
TokenKey key = lexer.current().key();
switch (key.kind()) {
case STRING:
case INTEGER_WITH_SUFFIX:
case FLOAT_WITH_SUFFIX:
case IDENTIFIER:
case INTEGER:
case FLOAT:
case GRAPHICS:
case CLOSE_ROUND:
case CLOSE_SQUARE:
case OPEN_ROUND:
case OPEN_SQUARE:
case COMMA:
break;
default:
throw new IllegalArgumentException("Unexpected token kind: "
+ lexer.current().key() + " for value: "
+ lexer.current().text());
}
token(kind, role, key, text);
}
/**
* Generic method that adds a token node
*
* @param kind
* a term kind
* @param role
* a syntax role
* @param tokenKey
* a token kind
* @param text
* a text of token
*/
private void token(Terms kind, SyntaxRole role, TokenKey tokenKey,
String text) {
singleNode(new TokenNode(kind, role, tokenKey, text));
}
/**
* Add an error node
*
* @param errorId
* error id
* @param errorArgs
* error arguments
*/
public void error(String errorId, Object[] errorArgs) {
singleNode(new ErrorNode(errorId, errorArgs));
}
/**
* Create node that matches specified token kind
*
* @param kind
* a term kind of node
* @param role
* a role of the node
* @param tokenKey
* a token kind
*
*/
public void tokenText(Terms kind, SyntaxRole role, TokenKey tokenKey) {
token(kind, role, tokenKey, null);
}
/**
* Create a node that matches any token
*
* @param kind
* a term kind of node
* @param role
* a role of the node
*
*/
public void anyToken(Terms kind, SyntaxRole role) {
token(kind, role, null, null);
}
/**
* start marked region node
*/
public void startMarked() {
startNode(new MarkedNode());
}
/**
* end marked region node
*/
public void endMarked() {
endNode(MarkedNode.class);
}
/**
* create commit mark node
*/
public void commitMark() {
singleNode(new CommitMarkNode());
}
/**
* Start compiling definition
*
* @param view
* a definition view to compile
*/
public void startDefinition(DefinitionView view) {
definitionStack.add(view);
}
/**
* end definition
*/
public void endDefinition() {
definitionStack.remove(definitionStack.size() - 1);
}
/**
* Start object at mark
*
* @param name
* an object to start
* @param wrappers
* wrappers for this object node
*/
public void startObjectAtMark(ObjectName name, WrapperLink wrappers) {
startNode(new ObjectNode(name, true, wrappers));
}
/**
* Start a fallback scope; the scope has to be initialized later
*
* @return a created node
*/
public FallbackObjectNode startFallbackScope() {
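// the created node is returned so that the caller can initialize it later,
// once the fallback object is known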
final FallbackObjectNode rc = new FallbackObjectNode();
startNode(rc);
return rc;
}
/**
* End fallback scope
*/
public void endFallbackScope() {
endNode(FallbackObjectNode.class);
}
/**
* Start object at mark
*
* @param name
* an object to start
*/
public void startObjectAtMark(ObjectName name) {
startNode(new ObjectNode(name, true, null));
}
/**
* Start wrapper
*
* @param w
* a wrapper to start
*/
public void startWrapper(Wrapper w) {
if (w != null) {
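// a wrapper surrounds the wrapped content with an additional object node
// and a single-valued property node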
final DefinitionView d = topDefinition();
startObject(d.convertName(w.object));
startProperty(new PropertyName(w.property), false);
}
}
/**
* End wrapper
*
* @param w
* a wrapper to end
*/
public void endWrapper(Wrapper w) {
if (w != null) {
endProperty();
endObject();
}
}
/**
* @return top definition on definition stack
*/
public DefinitionView topDefinition() {
if (definitionStack.isEmpty()) {
throw new IllegalStateException(
"[BUG]Obtaining definition when no definition is active.");
} else {
return definitionStack.get(definitionStack.size() - 1);
}
}
/**
* start alternate entry node.
*/
public void startAlternateEntry() {
startNode(new AlternateStartSequence());
}
/**
* end alternate entry node
*/
public void endAlternateEntry() {
endNode(AlternateStartSequence.class);
}
/**
* Set alternate entry point for the factory
*
* @param entry
* an alternate entry point
*/
public void setAlternateEntryState(State entry) {
targetFactory.setAlternateEntry(entry);
}
/**
* Build state machine for the activation factory
*/
public void buildStatementSequenceStateMachine() {
if (topNode == null) {
throw new IllegalStateException(
"[BUG]Top node should have been ended already");
}
}
/**
* @return activation factory created using this builder
*/
public ActivationFactory activation() {
return this.targetFactory;
}
/**
* @return context associated with builder
*/
public TermContext context() {
return this.contextBuilder.termContext();
}
/**
* Build lookahead
*
* @param visitedBuilders
* a set of visited builders
* @return the built look-ahead set
*/
public LookAheadSet buildLookAhead(Set<StateMachineBuilder> visitedBuilders) {
if (visitedBuilders.add(this)) {
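// this builder has not been visited yet on the current path, so its
// look-ahead can be computed from the top node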
try {
return topNode.buildLookAhead(visitedBuilders);
} finally {
visitedBuilders.remove(this);
}
} else {
final GrammarView v = contextBuilder.grammarBuilder().grammarView();
final Element e = v.getGrammar();
contextBuilder.error(this, e, "grammar.Context.cyclicContext",
new String[] { contextBuilder.name(), v.getSystemId() });
return new LookAheadSet();
}
}
/**
* Build state machine
*/
public void buildStateMachine() {
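// compile the collected node tree into states; the two ReturnState
// instances are the terminal states of the resulting state machine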
targetFactory.setPrimaryEntry(topNode.buildStates(this,
new ReturnState(false), new ReturnState(true)));
}
/**
* @return a context builder that created this state machine
*/
public ContextBuilder contextBuilder() {
return contextBuilder;
}
}