rdflib-2.4.2/0000755000175000017500000000000011256365012011724 5ustar nachonachordflib-2.4.2/src/0000755000175000017500000000000011204354476012520 5ustar nachonachordflib-2.4.2/src/bison/0000755000175000017500000000000011204354476013632 5ustar nachonachordflib-2.4.2/src/bison/SPARQLParser.ebnf0000644000175000017500000002261311153616037016646 0ustar nachonacho/* * DO NOT EDIT THIS FILE! * * Parser generated by BisonGen on Mon Oct 29 11:36:41 2007. */ Query ::= Prolog QueryTypes QueryTypes ::= SelectQuery | ConstructQuery | DescribeQuery | AskQuery DescribeQuery ::= DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier | DESCRIBE VAR_REFERENCES SolutionModifier | DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier | DESCRIBE VAR_REFERENCES WhereClause SolutionModifier ConstructQuery ::= CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier | CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier Prolog ::= BaseDecl PrefixDeclList | BaseDecl | PrefixDeclList | PrefixDeclList ::= PrefixDecl | PrefixDeclList PrefixDecl PrefixDecl ::= PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN | PREFIX PNAME_NS LESS_THAN GREATER_THAN BaseDecl ::= BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN AskQuery ::= ASK WhereClause | ASK DataSetClauseList WhereClause SelectQuery ::= SELECT VAR_REFERENCES WhereClause SolutionModifier | SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier | SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier | SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier VAR_REFERENCES ::= VariableReferenceList | ASTERISK VariableReferenceList ::= Var | VariableReferenceList Var IRIref ::= LESS_THAN Q_IRI_CONTENT GREATER_THAN | PrefixedName PrefixedName ::= PNAME_NS | PNAME_LN DataSetClauseList ::= DataSetClause | DataSetClauseList DataSetClause DataSetClause ::= FROM IRIref | FROM NAMED IRIref WhereClause ::= WHERE GroupGraphPattern | 
GroupGraphPattern SolutionModifier ::= | OrderClause | OrderClause LimitClause | OrderClause LimitClause OffsetClause | OrderClause OffsetClause LimitClause | LimitClause OffsetClause | OrderClause OffsetClause | OffsetClause | LimitClause OrderClause ::= ORDER BY OrderConditionList OrderConditionList ::= OrderCondition | OrderConditionList OrderCondition OrderCondition ::= ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | FunctionCall | BuiltInCall | LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | Var LimitClause ::= LIMIT NumericLiteral OffsetClause ::= OFFSET NumericLiteral GroupGraphPattern ::= LEFT_CURLY RIGHT_CURLY | LEFT_CURLY Triples GraphPatternList RIGHT_CURLY | LEFT_CURLY Triples RIGHT_CURLY | LEFT_CURLY GraphPatternList RIGHT_CURLY GraphPatternList ::= GraphPattern | GraphPatternList GraphPattern GraphPattern ::= Filter Triples | Filter DOT Triples | Filter DOT | Filter | GraphPatternNotTriples Triples | GraphPatternNotTriples DOT Triples | GraphPatternNotTriples | GraphPatternNotTriples DOT GraphPatternNotTriples ::= OPTIONAL GroupGraphPattern | GroupGraphPattern | GroupGraphPattern AlternativeGroupGraphPatterns | GRAPH Var GroupGraphPattern | GRAPH BlankNode GroupGraphPattern | GRAPH IRIref GroupGraphPattern AlternativeGroupGraphPatterns ::= UNION GroupGraphPattern | AlternativeGroupGraphPatterns UNION GroupGraphPattern ConditionalOrExpression ::= ConditionalAndExpression | ConditionalAndExpression ConditionalAndExpressionList ConditionalAndExpressionList ::= DOUBLE_PIPE ConditionalAndExpression | ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression ConditionalAndExpression ::= RelationalExpression ValueLogicalList | RelationalExpression ValueLogicalList ::= DOUBLE_AMPERSAND RelationalExpression | ValueLogicalList DOUBLE_AMPERSAND RelationalExpression RelationalExpression ::= AdditiveExpression | AdditiveExpression EQUALITY_OP AdditiveExpression | AdditiveExpression NOT_EQUAL 
AdditiveExpression | AdditiveExpression LESS_THAN AdditiveExpression | AdditiveExpression GREATER_THAN AdditiveExpression | AdditiveExpression LESS_THAN_EQUAL AdditiveExpression | AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression AdditiveExpression ::= MultiplicativeExpression | MultiplicativeExpression MultiplicativeExpressionList MultiplicativeExpressionList ::= PLUS MultiplicativeExpression | MINUS MultiplicativeExpression | MultiplicativeExpressionList MINUS MultiplicativeExpression | MultiplicativeExpressionList PLUS MultiplicativeExpression MultiplicativeExpression ::= UnaryExpression | UnaryExpression UnaryExpressionList UnaryExpressionList ::= ASTERISK UnaryExpression | FORWARDSLASH UnaryExpression | UnaryExpressionList ASTERISK UnaryExpression | UnaryExpressionList FORWARDSLASH UnaryExpression UnaryExpression ::= BANG PrimaryExpression | PLUS PrimaryExpression | MINUS PrimaryExpression | PrimaryExpression BuiltInCall ::= STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN | DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | BOUND LEFT_PAREN Var RIGHT_PAREN | isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | RegexExpression RegexExpression ::= REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN | REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN FunctionCall ::= IRIref LEFT_PAREN ArgumentList RIGHT_PAREN | IRIref NIL ArgumentList ::= ConditionalOrExpression | ConditionalOrExpression COMMA ArgumentList PrimaryExpression ::= LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | BuiltInCall | IRIref | FunctionCall | RDFLiteral | NumericLiteral | 
BooleanLiteral | BlankNode | Var Filter ::= FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN | FILTER BuiltInCall | FILTER FunctionCall Triples ::= Triples DOT TriplesSameSubject | Triples DOT | TriplesSameSubject TriplesSameSubject ::= Var PropertyListNotEmpty | GraphTerm PropertyListNotEmpty | LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList | Collection PropertyListNotEmpty | Collection PropertyList ::= PropertyListNotEmpty | PropertyListNotEmpty ::= Verb ObjectList | Verb ObjectList SEMICOLON PropertyList ObjectList ::= GraphNode | ObjectList COMMA GraphNode GraphNode ::= Var | TriplesNode | GraphTerm Verb ::= Var | IRIref | A TriplesNode ::= Collection | LEFT_SQUARE PropertyList RIGHT_SQUARE Collection ::= LEFT_PAREN GraphNodeList RIGHT_PAREN GraphNodeList ::= GraphNode | GraphNodeList GraphNode Var ::= VARNAME GraphTerm ::= IRIref | RDFLiteral | NumericLiteral | PLUS NumericLiteral | MINUS NumericLiteral | BooleanLiteral | BlankNode | NIL NumericLiteral ::= INTEGER | DECIMAL | DOUBLE RDFLiteral ::= String | String LANGTAG | String DOUBLE_HAT IRIref BooleanLiteral ::= TRUE | FALSE String ::= STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 | STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 | STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 | STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 | STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 | STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 | STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 | STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 BlankNode ::= ANON | BLANK_NODE_LABEL rdflib-2.4.2/src/bison/SPARQLParser.c0000644000175000017500000151256711153616037016173 0ustar nachonacho/* * DO NOT EDIT THIS FILE! * * Parser generated by BisonGen on Mon Oct 29 11:36:41 2007. 
*/ #include "Python.h" #include "structmember.h" #define PROJECT_NAME "rdflib.sparql.bison" #define PARSER_NAME "SPARQLParser" #define MODULE_INITFUNC initSPARQLParserc /* modules required for action routines */ static PyObject *IRIRef; static PyObject *Bindings; static PyObject *Query; static PyObject *QName; static PyObject *GraphPattern; static PyObject *FunctionLibrary; static PyObject *Operators; static PyObject *Triples; static PyObject *Resource; static PyObject *Filter; static PyObject *Util; static PyObject *Expression; static PyObject *SolutionModifier; static PyObject *rdflib; static PyObject *RDF; /* token definitions */ #define WHITESPACE 257 #define UNION 258 #define COLON 259 #define Q_IRI_CONTENT 260 #define PNAME_NS 261 #define CONSTRUCT 262 #define DESCRIBE 263 #define PNAME_LN 264 #define BLANK_NODE_LABEL 265 #define VARNAME 266 #define PREFIX 267 #define ASTERISK 268 #define DOT 269 #define QUESTION_MARK 270 #define DOLLAR 271 #define BASE 272 #define SELECT 273 #define DISTINCT 274 #define FROM 275 #define NAMED 276 #define OPTIONAL 277 #define FILTER 278 #define GRAPH 279 #define WHERE 280 #define ORDER 281 #define BY 282 #define ASC 283 #define ASK 284 #define DESC 285 #define LIMIT 286 #define OFFSET 287 #define STR 288 #define LANG 289 #define LANGMATCHES 290 #define DATATYPE 291 #define isIRI 292 #define isURI 293 #define isLITERAL 294 #define isBLANK 295 #define BOUND 296 #define REGEX 297 #define A 298 #define TRUE 299 #define FALSE 300 #define DOUBLE_AMPERSAND 301 #define DOUBLE_PIPE 302 #define BANG 303 #define DOUBLE_HAT 304 #define COMMA 305 #define FORWARDSLASH 306 #define LEFT_PAREN 307 #define RIGHT_PAREN 308 #define LEFT_SQUARE 309 #define RIGHT_SQUARE 310 #define SEMICOLON 311 #define INTEGER 312 #define DECIMAL 313 #define DOUBLE 314 #define STRING_LITERAL_DELIMETER_1 315 #define STRING_LITERAL_DELIMETER_2 316 #define STRING_LITERAL_DELIMETER_3 317 #define STRING_LITERAL_DELIMETER_4 318 #define STRING_LITERAL1 319 #define 
STRING_LITERAL2 320 #define STRING_LITERAL_LONG1 321 #define STRING_LITERAL_LONG2 322 #define NIL 323 #define ANON 324 #define LANGTAG 325 #define LEFT_CURLY 326 #define RIGHT_CURLY 327 #define PLUS 328 #define MINUS 329 #define EQUALITY_OP 330 #define NOT_EQUAL 331 #define LESS_THAN 332 #define GREATER_THAN 333 #define LESS_THAN_EQUAL 334 #define GREATER_THAN_EQUAL 335 /* vector mapping lexer token numbers into internal token numbers */ static const int token_translations[] = {2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81}; #define YYTRANSLATE(x) ((unsigned)(x) <= 335 ? token_translations[x] : 143) /* vector of items of all rules. 
*/ static const int rhs_tokens[] = {0, 86, 83, 0, 91, 0, 85, 0, 84, 0, 90, 0, 9, 92, 96, 98, 99, 0, 9, 92, 99, 0, 9, 92, 96, 99, 0, 9, 92, 98, 99, 0, 8, 72, 126, 73, 96, 98, 99, 0, 8, 72, 126, 73, 98, 99, 0, 89, 87, 0, 89, 0, 87, 0, 0, 88, 0, 87, 88, 0, 13, 7, 78, 6, 79, 0, 13, 7, 78, 79, 0, 18, 78, 6, 79, 0, 30, 98, 0, 30, 96, 98, 0, 19, 92, 98, 99, 0, 19, 92, 96, 98, 99, 0, 19, 20, 92, 98, 99, 0, 19, 20, 92, 96, 98, 99, 0, 93, 0, 14, 0, 136, 0, 93, 136, 0, 78, 6, 79, 0, 95, 0, 7, 0, 10, 0, 97, 0, 96, 97, 0, 21, 94, 0, 21, 22, 94, 0, 26, 105, 0, 105, 0, 0, 100, 0, 100, 103, 0, 100, 103, 104, 0, 100, 104, 103, 0, 103, 104, 0, 100, 104, 0, 104, 0, 103, 0, 27, 28, 101, 0, 102, 0, 101, 102, 0, 29, 53, 110, 54, 0, 31, 53, 110, 54, 0, 122, 0, 120, 0, 53, 110, 54, 0, 136, 0, 32, 138, 0, 33, 138, 0, 72, 73, 0, 72, 126, 106, 73, 0, 72, 126, 73, 0, 72, 106, 73, 0, 107, 0, 106, 107, 0, 125, 126, 0, 125, 15, 126, 0, 125, 15, 0, 125, 0, 108, 126, 0, 108, 15, 126, 0, 108, 0, 108, 15, 0, 23, 105, 0, 105, 0, 105, 109, 0, 25, 136, 105, 0, 25, 142, 105, 0, 25, 94, 105, 0, 4, 105, 0, 109, 4, 105, 0, 112, 0, 112, 111, 0, 48, 112, 0, 111, 48, 112, 0, 114, 113, 0, 114, 0, 47, 114, 0, 113, 47, 114, 0, 115, 0, 115, 76, 115, 0, 115, 77, 115, 0, 115, 78, 115, 0, 115, 79, 115, 0, 115, 80, 115, 0, 115, 81, 115, 0, 117, 0, 117, 116, 0, 74, 117, 0, 75, 117, 0, 116, 75, 117, 0, 116, 74, 117, 0, 119, 0, 119, 118, 0, 14, 119, 0, 52, 119, 0, 118, 14, 119, 0, 118, 52, 119, 0, 49, 124, 0, 74, 124, 0, 75, 124, 0, 124, 0, 34, 53, 110, 54, 0, 35, 53, 110, 54, 0, 36, 53, 110, 51, 110, 54, 0, 37, 53, 110, 54, 0, 42, 53, 136, 54, 0, 38, 53, 110, 54, 0, 39, 53, 110, 54, 0, 41, 53, 110, 54, 0, 40, 53, 110, 54, 0, 121, 0, 43, 53, 110, 51, 110, 54, 0, 43, 53, 110, 51, 110, 51, 110, 54, 0, 94, 53, 123, 54, 0, 94, 69, 0, 110, 0, 110, 51, 123, 0, 53, 110, 54, 0, 120, 0, 94, 0, 122, 0, 139, 0, 138, 0, 140, 0, 142, 0, 136, 0, 24, 53, 110, 54, 0, 24, 120, 0, 24, 122, 0, 126, 15, 127, 0, 126, 15, 0, 127, 0, 136, 
129, 0, 137, 129, 0, 55, 129, 56, 128, 0, 134, 129, 0, 134, 0, 129, 0, 0, 132, 130, 0, 132, 130, 57, 128, 0, 131, 0, 130, 51, 131, 0, 136, 0, 133, 0, 137, 0, 136, 0, 94, 0, 44, 0, 134, 0, 55, 128, 56, 0, 53, 135, 54, 0, 131, 0, 135, 131, 0, 12, 0, 94, 0, 139, 0, 138, 0, 74, 138, 0, 75, 138, 0, 140, 0, 142, 0, 69, 0, 58, 0, 59, 0, 60, 0, 141, 0, 141, 71, 0, 141, 50, 94, 0, 45, 0, 46, 0, 61, 65, 61, 0, 63, 66, 63, 0, 62, 67, 62, 0, 64, 68, 64, 0, 61, 61, 0, 63, 63, 0, 62, 62, 0, 64, 64, 0, 70, 0, 11, 0}; /* vector of line numbers and filename of all rules */ static const char* const rule_info[] = { ": line 0", "SPARQL.bgen: line 41", "SPARQL.bgen: line 52", "SPARQL.bgen: line 59", "SPARQL.bgen: line 62", "SPARQL.bgen: line 65", "SPARQL.bgen: line 78", "SPARQL.bgen: line 88", "SPARQL.bgen: line 98", "SPARQL.bgen: line 108", "SPARQL.bgen: line 123", "SPARQL.bgen: line 135", "SPARQL.bgen: line 152", "SPARQL.bgen: line 159", "SPARQL.bgen: line 166", "SPARQL.bgen: line 173", "SPARQL.bgen: line 192", "SPARQL.bgen: line 200", "SPARQL.bgen: line 216", "SPARQL.bgen: line 226", "SPARQL.bgen: line 242", "SPARQL.bgen: line 256", "SPARQL.bgen: line 264", "SPARQL.bgen: line 277", "SPARQL.bgen: line 287", "SPARQL.bgen: line 297", "SPARQL.bgen: line 308", "SPARQL.bgen: line 324", "SPARQL.bgen: line 327", "SPARQL.bgen: line 337", "SPARQL.bgen: line 345", "SPARQL.bgen: line 361", "SPARQL.bgen: line 369", "SPARQL.bgen: line 384", "SPARQL.bgen: line 390", "SPARQL.bgen: line 400", "SPARQL.bgen: line 408", "SPARQL.bgen: line 422", "SPARQL.bgen: line 429", "SPARQL.bgen: line 442", "SPARQL.bgen: line 449", "SPARQL.bgen: line 461", "SPARQL.bgen: line 466", "SPARQL.bgen: line 472", "SPARQL.bgen: line 479", "SPARQL.bgen: line 487", "SPARQL.bgen: line 495", "SPARQL.bgen: line 503", "SPARQL.bgen: line 511", "SPARQL.bgen: line 519", "SPARQL.bgen: line 532", "SPARQL.bgen: line 545", "SPARQL.bgen: line 553", "SPARQL.bgen: line 569", "SPARQL.bgen: line 580", "SPARQL.bgen: line 591", "SPARQL.bgen: 
line 594", "SPARQL.bgen: line 597", "SPARQL.bgen: line 607", "SPARQL.bgen: line 615", "SPARQL.bgen: line 628", "SPARQL.bgen: line 645", "SPARQL.bgen: line 653", "SPARQL.bgen: line 662", "SPARQL.bgen: line 671", "SPARQL.bgen: line 684", "SPARQL.bgen: line 692", "SPARQL.bgen: line 739", "SPARQL.bgen: line 747", "SPARQL.bgen: line 756", "SPARQL.bgen: line 764", "SPARQL.bgen: line 771", "SPARQL.bgen: line 779", "SPARQL.bgen: line 788", "SPARQL.bgen: line 795", "SPARQL.bgen: line 847", "SPARQL.bgen: line 854", "SPARQL.bgen: line 860", "SPARQL.bgen: line 867", "SPARQL.bgen: line 875", "SPARQL.bgen: line 883", "SPARQL.bgen: line 898", "SPARQL.bgen: line 907", "SPARQL.bgen: line 928", "SPARQL.bgen: line 934", "SPARQL.bgen: line 946", "SPARQL.bgen: line 955", "SPARQL.bgen: line 970", "SPARQL.bgen: line 977", "SPARQL.bgen: line 987", "SPARQL.bgen: line 996", "SPARQL.bgen: line 1021", "SPARQL.bgen: line 1024", "SPARQL.bgen: line 1032", "SPARQL.bgen: line 1040", "SPARQL.bgen: line 1048", "SPARQL.bgen: line 1056", "SPARQL.bgen: line 1064", "SPARQL.bgen: line 1077", "SPARQL.bgen: line 1083", "SPARQL.bgen: line 1095", "SPARQL.bgen: line 1105", "SPARQL.bgen: line 1115", "SPARQL.bgen: line 1125", "SPARQL.bgen: line 1140", "SPARQL.bgen: line 1146", "SPARQL.bgen: line 1157", "SPARQL.bgen: line 1166", "SPARQL.bgen: line 1175", "SPARQL.bgen: line 1185", "SPARQL.bgen: line 1200", "SPARQL.bgen: line 1207", "SPARQL.bgen: line 1214", "SPARQL.bgen: line 1221", "SPARQL.bgen: line 1241", "SPARQL.bgen: line 1251", "SPARQL.bgen: line 1261", "SPARQL.bgen: line 1273", "SPARQL.bgen: line 1283", "SPARQL.bgen: line 1293", "SPARQL.bgen: line 1303", "SPARQL.bgen: line 1313", "SPARQL.bgen: line 1323", "SPARQL.bgen: line 1333", "SPARQL.bgen: line 1341", "SPARQL.bgen: line 1352", "SPARQL.bgen: line 1370", "SPARQL.bgen: line 1379", "SPARQL.bgen: line 1396", "SPARQL.bgen: line 1405", "SPARQL.bgen: line 1421", "SPARQL.bgen: line 1430", "SPARQL.bgen: line 1433", "SPARQL.bgen: line 1436", "SPARQL.bgen: line 
1439", "SPARQL.bgen: line 1442", "SPARQL.bgen: line 1445", "SPARQL.bgen: line 1448", "SPARQL.bgen: line 1451", "SPARQLTurtleSuperSet.bgen.frag: line 8", "SPARQLTurtleSuperSet.bgen.frag: line 17", "SPARQLTurtleSuperSet.bgen.frag: line 24", "SPARQLTurtleSuperSet.bgen.frag: line 40", "SPARQLTurtleSuperSet.bgen.frag: line 50", "SPARQLTurtleSuperSet.bgen.frag: line 54", "SPARQLTurtleSuperSet.bgen.frag: line 80", "SPARQLTurtleSuperSet.bgen.frag: line 87", "SPARQLTurtleSuperSet.bgen.frag: line 94", "SPARQLTurtleSuperSet.bgen.frag: line 103", "SPARQLTurtleSuperSet.bgen.frag: line 112", "SPARQLTurtleSuperSet.bgen.frag: line 124", "SPARQLTurtleSuperSet.bgen.frag: line 127", "SPARQLTurtleSuperSet.bgen.frag: line 135", "SPARQLTurtleSuperSet.bgen.frag: line 143", "SPARQLTurtleSuperSet.bgen.frag: line 159", "SPARQLTurtleSuperSet.bgen.frag: line 167", "SPARQLTurtleSuperSet.bgen.frag: line 186", "SPARQLTurtleSuperSet.bgen.frag: line 189", "SPARQLTurtleSuperSet.bgen.frag: line 192", "SPARQLTurtleSuperSet.bgen.frag: line 204", "SPARQLTurtleSuperSet.bgen.frag: line 207", "SPARQLTurtleSuperSet.bgen.frag: line 210", "SPARQLTurtleSuperSet.bgen.frag: line 225", "SPARQLTurtleSuperSet.bgen.frag: line 228", "SPARQLTurtleSuperSet.bgen.frag: line 244", "SPARQLTurtleSuperSet.bgen.frag: line 256", "SPARQLTurtleSuperSet.bgen.frag: line 264", "SPARQLTurtleSuperSet.bgen.frag: line 278", "SPARQLTurtleSuperSet.bgen.frag: line 299", "SPARQLTurtleSuperSet.bgen.frag: line 302", "SPARQLTurtleSuperSet.bgen.frag: line 305", "SPARQLTurtleSuperSet.bgen.frag: line 308", "SPARQLTurtleSuperSet.bgen.frag: line 312", "SPARQLTurtleSuperSet.bgen.frag: line 322", "SPARQLTurtleSuperSet.bgen.frag: line 325", "SPARQLTurtleSuperSet.bgen.frag: line 328", "SPARQLTurtleSuperSet.bgen.frag: line 340", "SPARQLTurtleSuperSet.bgen.frag: line 349", "SPARQLTurtleSuperSet.bgen.frag: line 358", "SPARQLTurtleSuperSet.bgen.frag: line 371", "SPARQLTurtleSuperSet.bgen.frag: line 377", "SPARQLTurtleSuperSet.bgen.frag: line 386", 
"SPARQLTurtleSuperSet.bgen.frag: line 399", "SPARQLTurtleSuperSet.bgen.frag: line 402", "SPARQLTurtleSuperSet.bgen.frag: line 413", "SPARQLTurtleSuperSet.bgen.frag: line 421", "SPARQLTurtleSuperSet.bgen.frag: line 429", "SPARQLTurtleSuperSet.bgen.frag: line 437", "SPARQLTurtleSuperSet.bgen.frag: line 446", "SPARQLTurtleSuperSet.bgen.frag: line 453", "SPARQLTurtleSuperSet.bgen.frag: line 460", "SPARQLTurtleSuperSet.bgen.frag: line 467", "SPARQLTurtleSuperSet.bgen.frag: line 480", "SPARQLTurtleSuperSet.bgen.frag: line 491", }; /* vector of string-names indexed by token number */ static const char* const token_names[] = { "", "error", "$undefined.", "WHITESPACE", "UNION", "COLON", "Q_IRI_CONTENT", "PNAME_NS", "CONSTRUCT", "DESCRIBE", "PNAME_LN", "BLANK_NODE_LABEL", "VARNAME", "PREFIX", "ASTERISK", "DOT", "QUESTION_MARK", "DOLLAR", "BASE", "SELECT", "DISTINCT", "FROM", "NAMED", "OPTIONAL", "FILTER", "GRAPH", "WHERE", "ORDER", "BY", "ASC", "ASK", "DESC", "LIMIT", "OFFSET", "STR", "LANG", "LANGMATCHES", "DATATYPE", "isIRI", "isURI", "isLITERAL", "isBLANK", "BOUND", "REGEX", "A", "TRUE", "FALSE", "DOUBLE_AMPERSAND", "DOUBLE_PIPE", "BANG", "DOUBLE_HAT", "COMMA", "FORWARDSLASH", "LEFT_PAREN", "RIGHT_PAREN", "LEFT_SQUARE", "RIGHT_SQUARE", "SEMICOLON", "INTEGER", "DECIMAL", "DOUBLE", "STRING_LITERAL_DELIMETER_1", "STRING_LITERAL_DELIMETER_2", "STRING_LITERAL_DELIMETER_3", "STRING_LITERAL_DELIMETER_4", "STRING_LITERAL1", "STRING_LITERAL2", "STRING_LITERAL_LONG1", "STRING_LITERAL_LONG2", "NIL", "ANON", "LANGTAG", "LEFT_CURLY", "RIGHT_CURLY", "PLUS", "MINUS", "EQUALITY_OP", "NOT_EQUAL", "LESS_THAN", "GREATER_THAN", "LESS_THAN_EQUAL", "GREATER_THAN_EQUAL", "Query", "QueryTypes", "DescribeQuery", "ConstructQuery", "Prolog", "PrefixDeclList", "PrefixDecl", "BaseDecl", "AskQuery", "SelectQuery", "VAR_REFERENCES", "VariableReferenceList", "IRIref", "PrefixedName", "DataSetClauseList", "DataSetClause", "WhereClause", "SolutionModifier", "OrderClause", "OrderConditionList", 
"OrderCondition", "LimitClause", "OffsetClause", "GroupGraphPattern", "GraphPatternList", "GraphPattern", "GraphPatternNotTriples", "AlternativeGroupGraphPatterns", "ConditionalOrExpression", "ConditionalAndExpressionList", "ConditionalAndExpression", "ValueLogicalList", "RelationalExpression", "AdditiveExpression", "MultiplicativeExpressionList", "MultiplicativeExpression", "UnaryExpressionList", "UnaryExpression", "BuiltInCall", "RegexExpression", "FunctionCall", "ArgumentList", "PrimaryExpression", "Filter", "Triples", "TriplesSameSubject", "PropertyList", "PropertyListNotEmpty", "ObjectList", "GraphNode", "Verb", "TriplesNode", "Collection", "GraphNodeList", "Var", "GraphTerm", "NumericLiteral", "RDFLiteral", "BooleanLiteral", "String", "BlankNode", "0", }; /* symbol number of symbol that rule derives. */ static const int derives[] = {0, 82, 83, 83, 83, 83, 84, 84, 84, 84, 85, 85, 86, 86, 86, 86, 87, 87, 88, 88, 89, 90, 90, 91, 91, 91, 91, 92, 92, 93, 93, 94, 94, 95, 95, 96, 96, 97, 97, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 101, 101, 102, 102, 102, 102, 102, 102, 103, 104, 105, 105, 105, 105, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 109, 109, 110, 110, 111, 111, 112, 112, 113, 113, 114, 114, 114, 114, 114, 114, 114, 115, 115, 116, 116, 116, 116, 117, 117, 118, 118, 118, 118, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 122, 122, 123, 123, 124, 124, 124, 124, 124, 124, 124, 124, 124, 125, 125, 125, 126, 126, 126, 127, 127, 127, 127, 127, 128, 128, 129, 129, 130, 130, 131, 131, 131, 132, 132, 132, 133, 133, 134, 135, 135, 136, 137, 137, 137, 137, 137, 137, 137, 137, 138, 138, 138, 139, 139, 139, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142}; /* number of symbols composing right hand side of rule. 
*/ static const int rhs_size[] = {0, 2, 1, 1, 1, 1, 5, 3, 4, 4, 7, 6, 2, 1, 1, 0, 1, 2, 5, 4, 4, 2, 3, 4, 5, 5, 6, 1, 1, 1, 2, 3, 1, 1, 1, 1, 2, 2, 3, 2, 1, 0, 1, 2, 3, 3, 2, 2, 1, 1, 3, 1, 2, 4, 4, 1, 1, 3, 1, 2, 2, 2, 4, 3, 3, 1, 2, 2, 3, 2, 1, 2, 3, 1, 2, 2, 1, 2, 3, 3, 3, 2, 3, 1, 2, 2, 3, 2, 1, 2, 3, 1, 3, 3, 3, 3, 3, 3, 1, 2, 2, 2, 3, 3, 1, 2, 2, 2, 3, 3, 2, 2, 2, 1, 4, 4, 6, 4, 4, 4, 4, 4, 4, 1, 6, 8, 4, 2, 1, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 4, 2, 2, 3, 2, 1, 2, 2, 4, 2, 1, 1, 0, 2, 4, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 1, 2, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 1, 1, 3, 3, 3, 3, 2, 2, 2, 2, 1, 1}; /* default rule to reduce with in state. 0 means the default is an error. indexed by state number */ static const int default_action[] = {15, 0, 0, 0, 14, 16, 13, 0, 0, 0, 0, 0, 0, 1, 4, 3, 5, 2, 17, 12, 0, 0, 0, 167, 28, 41, 27, 29, 0, 0, 0, 0, 0, 0, 35, 21, 40, 0, 19, 20, 33, 34, 193, 182, 183, 0, 0, 176, 177, 178, 0, 0, 0, 0, 175, 192, 0, 0, 0, 168, 32, 0, 144, 149, 0, 0, 170, 169, 173, 179, 174, 0, 0, 0, 41, 41, 7, 42, 49, 48, 30, 0, 0, 41, 0, 37, 39, 0, 0, 0, 61, 76, 0, 65, 73, 70, 0, 36, 22, 18, 151, 165, 157, 162, 0, 156, 158, 161, 160, 0, 0, 159, 188, 0, 190, 0, 189, 0, 191, 0, 171, 172, 0, 143, 0, 148, 145, 146, 0, 180, 0, 59, 60, 41, 8, 9, 43, 47, 46, 0, 41, 41, 23, 38, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 140, 123, 141, 0, 0, 0, 0, 77, 64, 66, 74, 71, 69, 67, 63, 0, 0, 150, 164, 166, 151, 152, 154, 184, 186, 185, 187, 31, 142, 0, 41, 181, 0, 0, 0, 50, 51, 56, 55, 58, 6, 44, 45, 41, 25, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 132, 0, 83, 88, 91, 98, 104, 131, 133, 113, 138, 135, 134, 136, 137, 0, 127, 80, 78, 79, 81, 0, 72, 68, 62, 163, 147, 0, 151, 41, 11, 0, 0, 0, 52, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 0, 111, 112, 139, 0, 84, 0, 87, 0, 0, 0, 0, 0, 0, 0, 0, 99, 0, 0, 105, 128, 0, 82, 155, 153, 10, 0, 0, 57, 114, 115, 0, 117, 119, 120, 122, 121, 118, 0, 130, 85, 0, 89, 0, 92, 93, 94, 95, 96, 97, 100, 101, 0, 0, 106, 107, 0, 0, 0, 126, 
53, 54, 0, 0, 86, 90, 103, 102, 108, 109, 129, 116, 0, 124, 0, 125, 0, 0, 0}; /* default state to go to after a reduction of a rule. indexed by variable number (lhs token) */ static const int default_goto[] = {340, 13, 14, 15, 3, 4, 5, 6, 16, 17, 25, 26, 217, 60, 33, 34, 35, 76, 77, 192, 193, 78, 79, 36, 92, 93, 94, 164, 284, 269, 219, 271, 220, 221, 280, 222, 283, 223, 224, 158, 225, 285, 226, 95, 61, 62, 173, 174, 178, 101, 110, 102, 63, 104, 227, 65, 228, 229, 230, 69, 231}; /* index in yytable of the portion describing state (indexed by state number) If the value in yytable is positive, we shift the token and go to that state. If the value is negative, it is minus a rule number to reduce by. If the value is zero, the default action from yydefact[s] is used. */ static const int action_idx[] = {47, 97, -64, 299, 11, -32768, 11, -40, 52, 50, 1, 250, 139, -32768, -32768, -32768, -32768, -32768, -32768, 11, 19, 53, 312, -32768, -32768, 311, 169, -32768, 1, 181, 21, 113, 339, 181, -32768, -32768, -32768, 108, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 408, 119, -32768, -32768, -32768, 101, 116, 87, 131, -32768, -32768, 110, 110, 190, -32768, -32768, -9, -32768, 119, 119, 119, -32768, -32768, -32768, -30, -32768, 177, 110, 110, 490, 309, -32768, 281, 179, -32768, -32768, 182, 182, 309, 27, -32768, -32768, 113, 397, 239, -32768, 206, 287, -32768, 434, 495, 275, -32768, -32768, -32768, 215, -32768, -32768, -32768, 521, -32768, -32768, -32768, -32768, 158, 582, -32768, -32768, 162, -32768, 168, -32768, 170, -32768, 180, -32768, -32768, 173, 608, 182, -32768, -32768, -32768, 38, -32768, 669, -32768, -32768, 309, -32768, -32768, 179, 226, -32768, 182, 309, 309, -32768, -32768, -32768, 214, 238, 241, 243, 248, 252, 263, 272, 277, 286, 714, 72, -32768, -32768, -32768, 113, 113, 113, 113, 330, -32768, -32768, 754, 288, 754, 288, -32768, 354, 289, -32768, -32768, -32768, 253, 82, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 182, 389, -32768, 301, 303, 
815, 874, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 389, -32768, -32768, 919, 919, 919, 919, 919, 919, 919, 919, 169, 919, 964, 1009, 1054, 1054, 72, 314, 318, 333, 379, 246, 9, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 1099, -32768, -32768, -32768, -32768, -32768, 113, 288, 288, -32768, -32768, -32768, 1139, 262, 389, -32768, 1200, 1200, 334, -32768, -32768, 335, 337, 342, 341, 351, 352, 356, 369, 370, 345, -32768, 371, -32768, -32768, -32768, 1200, 380, 1200, 382, 1200, 1200, 1200, 1200, 1200, 1200, 1200, 1200, 278, 1200, 1200, 43, 391, 376, -32768, -32768, -32768, -32768, 393, 394, -32768, -32768, -32768, 1200, -32768, -32768, -32768, -32768, -32768, -32768, 1200, -32768, -32768, 1200, -32768, 1200, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 1200, 1200, -32768, -32768, 1200, 1200, 1200, -32768, -32768, -32768, 398, 165, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, 1200, -32768, 410, -32768, 443, 451, -32768}; /* The index in yytable of the portion describing what to do after reducing a rule. The value from yytable is the state to go to. */ static const int goto_idx[] = {-32768, -32768, -32768, -32768, -32768, 456, 7, -32768, -32768, -32768, 31, -32768, -13, -32768, 49, 118, 147, 115, -32768, -32768, 273, -74, 58, 117, 377, -84, -32768, -32768, 33, -32768, -191, -32768, -169, 270, -32768, -249, -32768, 6, -86, -32768, -83, 152, 60, -32768, 8, 353, -156, 220, -32768, -100, -32768, -32768, -38, -32768, -10, -18, 51, 17, 24, -32768, 48}; /* A vector filled with portions for different uses. 
(using action_idx and goto_idx) */ static const int yytable[] = {27, 27, 157, 136, 176, 159, 123, 103, 166, 59, 179, 18, 64, 23, 8, 24, 80, 85, 27, 59, 128, 243, 64, 281, 1, 37, 18, 106, 40, 314, 315, 41, 59, 108, 40, 105, 111, 41, 20, 67, 96, 129, 29, 84, 194, 40, 68, 195, 41, 67, 108, 108, 108, 111, 111, 111, 68, 320, 21, 81, 1, 282, 67, 199, 124, 2, 103, 330, 331, 68, 70, 143, 103, 66, 74, 156, 160, 304, 82, 161, 70, 59, 59, 66, 64, 64, 106, 108, 166, 288, 111, 59, 106, 70, 105, 321, 66, 59, 38, 58, 105, 306, 168, 170, 7, 58, 194, 120, 121, 195, 59, 67, 67, 64, 328, 188, 58, 156, 68, 68, 196, 67, 22, 131, 132, 232, 40, 67, 68, 41, 139, 23, 39, 244, 68, 137, 138, 162, 329, 245, 67, 233, 70, 70, 287, 66, 66, 68, 86, 91, 116, 97, 70, 117, 59, 66, 59, 64, 70, 64, 30, 66, 112, 107, 108, 31, 113, 111, 47, 48, 49, 70, 75, 186, 66, 239, 83, 240, 114, 156, 98, 23, 196, 115, 67, 32, 67, 99, 218, 134, 135, 68, 97, 68, 198, 118, 122, 58, 142, 119, 97, 261, 30, 30, 144, 130, 103, 31, 31, 91, 163, 32, 73, 91, 177, 70, 336, 70, 66, 337, 66, 133, 40, 180, 250, 41, 106, 23, 140, 141, 181, 59, 108, 182, 105, 111, 253, 254, 255, 256, 257, 258, 259, 260, 183, 262, 40, 264, 197, 41, 42, 23, 184, 32, 32, 201, 202, 97, 72, 107, 40, 67, 23, 41, 24, 23, 109, 203, 68, 40, 28, 187, 41, 263, 23, 265, 266, 234, 235, 236, 237, 290, 291, 125, 126, 127, 200, 318, 319, 91, 123, 204, 70, 58, 205, 66, 206, 107, 87, 88, 89, 207, 247, 123, 97, 208, 107, 9, 10, 55, 87, 88, 89, 72, 73, 252, 209, 58, 11, 40, 278, 279, 41, 42, 23, 210, 332, 333, 326, 12, 211, 58, 30, 246, 238, 327, 71, 31, 71, 212, 58, 72, 73, 72, 73, 242, 40, 32, 171, 41, 42, 23, 316, 317, 248, 286, 249, 43, 44, 32, 165, 289, 87, 88, 89, 45, 268, 46, 267, 338, 47, 48, 49, 50, 51, 52, 53, 87, 88, 89, 270, 54, 55, 32, 43, 44, 56, 57, 292, 293, 58, 294, 45, 295, 46, 296, 302, 47, 48, 49, 50, 51, 52, 53, 40, 297, 298, 41, 54, 55, 299, 32, 90, 56, 57, 40, 71, 58, 41, 42, 23, 72, 73, 300, 301, 303, 32, 241, 305, 307, 323, 145, 146, 147, 
148, 149, 150, 151, 152, 153, 154, 40, 322, 341, 41, 42, 23, 324, 325, 167, 155, 342, 335, 43, 44, 272, 273, 274, 275, 276, 277, 45, 19, 100, 339, 251, 47, 48, 49, 50, 51, 52, 53, 172, 334, 58, 185, 54, 55, 43, 44, 0, 56, 57, 0, 0, 58, 45, 0, 46, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 40, 54, 55, 41, 42, 23, 56, 57, 169, 30, 58, 0, 0, 0, 31, 71, 0, 0, 0, 0, 72, 73, 0, 0, 0, 0, 40, 0, 0, 41, 42, 23, 0, 0, 0, 0, 0, 0, 43, 44, 308, 309, 310, 311, 312, 313, 45, 0, 46, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 32, 0, 54, 55, 43, 44, 0, 56, 57, 0, 0, 58, 45, 175, 100, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 40, 54, 55, 41, 42, 23, 56, 57, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 41, 42, 23, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 0, 0, 0, 0, 45, 0, 100, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 54, 55, 43, 44, 0, 56, 57, 0, 0, 58, 45, 0, 46, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 40, 54, 55, 41, 0, 23, 56, 57, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 189, 0, 190, 0, 0, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 0, 0, 0, 0, 0, 0, 0, 40, 191, 0, 41, 42, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 40, 0, 213, 41, 42, 23, 214, 0, 0, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 215, 216, 0, 0, 58, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 0, 0, 0, 0, 45, 0, 46, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 40, 54, 55, 41, 42, 23, 56, 57, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 213, 0, 0, 0, 214, 0, 0, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 40, 0, 0, 41, 55, 23, 0, 0, 215, 216, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 189, 0, 190, 0, 0, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 0, 0, 0, 0, 0, 0, 0, 40, 191, 0, 41, 42, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 213, 0, 0, 
40, 214, 0, 41, 42, 23, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 215, 216, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 0, 0, 0, 40, 214, 0, 41, 42, 23, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 213, 0, 0, 40, 214, 0, 41, 42, 23, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 215, 216, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 0, 0, 0, 40, 214, 0, 41, 42, 23, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 58, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 40, 0, 213, 41, 42, 23, 214, 0, 0, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 215, 216, 0, 0, 58, 0, 0, 0, 0, 0, 0, 43, 44, 0, 0, 0, 0, 0, 0, 45, 0, 100, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 40, 54, 55, 41, 42, 23, 56, 57, 0, 0, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 0, 43, 44, 0, 0, 213, 0, 0, 0, 214, 0, 0, 0, 0, 47, 48, 49, 50, 51, 52, 53, 0, 0, 0, 0, 0, 55, 0, 0, 0, 215, 216, 0, 0, 58}; /* a vector indexed in parallel with yytable. It indicates the bounds of the portion you are trying to examine. 
*/ static const int yycheck[] = {10, 11, 88, 77, 104, 88, 15, 45, 92, 22, 110, 4, 22, 12, 78, 14, 26, 30, 28, 32, 50, 177, 32, 14, 13, 6, 19, 45, 7, 278, 279, 10, 45, 46, 7, 45, 46, 10, 78, 22, 32, 71, 11, 22, 130, 7, 22, 130, 10, 32, 63, 64, 65, 63, 64, 65, 32, 14, 6, 28, 13, 52, 45, 137, 73, 18, 104, 316, 317, 45, 22, 84, 110, 22, 25, 88, 89, 268, 29, 89, 32, 94, 95, 32, 94, 95, 104, 100, 172, 245, 100, 104, 110, 45, 104, 52, 45, 110, 79, 78, 110, 270, 94, 95, 7, 78, 192, 56, 57, 192, 123, 94, 95, 123, 305, 128, 78, 130, 94, 95, 130, 104, 72, 72, 73, 53, 7, 110, 104, 10, 81, 12, 79, 51, 110, 77, 78, 89, 307, 57, 123, 69, 94, 95, 244, 94, 95, 123, 31, 32, 63, 33, 104, 66, 167, 104, 169, 167, 110, 169, 21, 110, 61, 44, 177, 26, 65, 177, 58, 59, 60, 123, 25, 124, 123, 167, 29, 169, 62, 192, 33, 12, 192, 67, 167, 72, 169, 79, 155, 74, 75, 167, 74, 169, 136, 64, 6, 78, 83, 68, 82, 211, 21, 21, 87, 28, 244, 26, 26, 92, 4, 72, 33, 96, 56, 167, 51, 169, 167, 54, 169, 74, 7, 61, 191, 10, 244, 12, 81, 82, 62, 244, 245, 63, 244, 245, 203, 204, 205, 206, 207, 208, 209, 210, 64, 212, 7, 214, 133, 10, 11, 12, 79, 72, 72, 140, 141, 139, 32, 44, 7, 244, 12, 10, 14, 12, 46, 53, 244, 7, 20, 124, 10, 213, 12, 215, 216, 160, 161, 162, 163, 248, 249, 63, 64, 65, 139, 281, 282, 172, 15, 53, 244, 78, 53, 244, 53, 44, 23, 24, 25, 53, 187, 15, 186, 53, 44, 8, 9, 70, 23, 24, 25, 32, 33, 200, 53, 78, 19, 7, 74, 75, 10, 11, 12, 53, 320, 321, 295, 30, 53, 78, 21, 186, 4, 302, 27, 26, 27, 53, 78, 32, 33, 32, 33, 56, 7, 72, 73, 10, 11, 12, 74, 75, 53, 238, 53, 45, 46, 72, 73, 246, 23, 24, 25, 53, 48, 55, 54, 336, 58, 59, 60, 61, 62, 63, 64, 23, 24, 25, 47, 69, 70, 72, 45, 46, 74, 75, 54, 54, 78, 54, 53, 51, 55, 54, 51, 58, 59, 60, 61, 62, 63, 64, 7, 54, 54, 10, 69, 70, 54, 72, 73, 74, 75, 7, 27, 78, 10, 11, 12, 32, 33, 54, 54, 54, 72, 73, 48, 47, 54, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 7, 51, 0, 10, 11, 12, 54, 54, 15, 53, 0, 54, 45, 46, 76, 77, 78, 79, 80, 81, 53, 6, 55, 54, 192, 58, 59, 
60, 61, 62, 63, 64, 96, 322, 78, 123, 69, 70, 45, 46, -1, 74, 75, -1, -1, 78, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, 7, 69, 70, 10, 11, 12, 74, 75, 15, 21, 78, -1, -1, -1, 26, 27, -1, -1, -1, -1, 32, 33, -1, -1, -1, -1, 7, -1, -1, 10, 11, 12, -1, -1, -1, -1, -1, -1, 45, 46, 272, 273, 274, 275, 276, 277, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, 72, -1, 69, 70, 45, 46, -1, 74, 75, -1, -1, 78, 53, 54, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, 7, 69, 70, 10, 11, 12, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 7, -1, -1, 10, 11, 12, -1, -1, -1, -1, -1, -1, 45, 46, -1, -1, -1, -1, -1, -1, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, 69, 70, 45, 46, -1, 74, 75, -1, -1, 78, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, 7, 69, 70, 10, -1, 12, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 29, -1, 31, -1, -1, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, -1, -1, -1, -1, -1, -1, -1, 7, 53, -1, 10, 11, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, 7, -1, 49, 10, 11, 12, 53, -1, -1, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, 45, 46, -1, -1, -1, -1, -1, -1, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, 7, 69, 70, 10, 11, 12, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, 49, -1, -1, -1, 53, -1, -1, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, 7, -1, -1, 10, 70, 12, -1, -1, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, -1, -1, -1, 29, -1, 31, -1, -1, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, -1, -1, -1, -1, -1, -1, -1, 7, 53, -1, 10, 11, 12, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, 
49, -1, -1, 7, 53, -1, 10, 11, 12, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, 74, 75, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, -1, -1, -1, 7, 53, -1, 10, 11, 12, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, -1, -1, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, 49, -1, -1, 7, 53, -1, 10, 11, 12, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, 74, 75, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, -1, -1, -1, 7, 53, -1, 10, 11, 12, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, -1, -1, -1, -1, 78, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, 7, -1, 49, 10, 11, 12, 53, -1, -1, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, 45, 46, -1, -1, -1, -1, -1, -1, 53, -1, 55, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, 7, 69, 70, 10, 11, 12, 74, 75, -1, -1, 78, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, -1, 45, 46, -1, -1, 49, -1, -1, -1, 53, -1, -1, -1, -1, 58, 59, 60, 61, 62, 63, 64, -1, -1, -1, -1, -1, 70, -1, -1, -1, 74, 75, -1, -1, 78}; #define YYLAST 1278 #define YYFINAL 342 #define YYFLAG -32768 #define YYNTBASE 82 #define LEXER_OP_FAILURE 0 #define LEXER_OP_SUCCESS 1 #define LEXER_OP_BOL 2 #define LEXER_OP_EOL 3 #define LEXER_OP_EOF 4 #define LEXER_OP_ANY 5 #define LEXER_OP_LITERAL 6 #define LEXER_OP_NOT_LITERAL 7 #define LEXER_OP_CHARSET 8 #define LEXER_OP_NOT_CHARSET 9 #define LEXER_OP_ASSERT 10 #define LEXER_OP_BRANCH 11 #define LEXER_OP_REPEAT 12 #define LEXER_OP_REPEAT_RANGE 13 #define LEXER_CHARSET_FAILURE 0 #define LEXER_CHARSET_LITERAL 1 #define LEXER_CHARSET_RANGE 2 #define LEXER_CHARSET_SMALL 3 #define LEXER_CHARSET_BIG 4 #define LEXER_INITIAL 1 #define LEXER_STRING_MODE_SHORT_2 2 #define LEXER_STRING_MODE_LONG_2 3 #define LEXER_STRING_MODE_LONG_1 4 #define 
LEXER_STRING_MODE_SHORT_1 5 #define LEXER_IRI_MODE 6 #define LEXER_START_STATE LEXER_INITIAL static unsigned char lexer_charsets[69][32] = { { 0xFF, 0xFF, 0xFF, 0xFF, 0x81, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00, 0x40, 0x01, 0x00, 0x00, 0x38, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x26, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x07, 0xFE, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x03, 0xFE, 0xFF, 0xFF, 0x07, 0xFE, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x07, 0xFE, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF3, 0x7F, 0xFE, 0xFD, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0x31, 0xFC }, { 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x00, 0xF8, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0xD7, 0xFF, 0xFF, 0xFB, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x7F, 0x54, 0xFD, 0xFF, 0x0F, 0x00 }, { 0xFE, 0xDF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xDF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x9F, 0x19, 0xFF, 0xFF, 0xFF, 0xCF, 0x3F, 0x03 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0x7F, 0x02, 0xFE, 0xFF, 0xFF, 
0xFF, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x07, 0x07, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0x07, 0xFE, 0x07, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7C, 0xFF, 0x7F, 0x2F, 0x00, 0x60, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x23, 0x00, 0x00, 0x00, 0xFF, 0x03, 0x00, 0x00, 0x00, 0xE0, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xC5, 0x03, 0x00, 0x00, 0x00, 0xB0, 0x03, 0x00, 0x03, 0x00 }, { 0xE0, 0x87, 0xF9, 0xFF, 0xFF, 0xFD, 0x6D, 0x03, 0x00, 0x00, 0x00, 0x5E, 0x00, 0x00, 0x1C, 0x00, 0xE0, 0xAF, 0xFB, 0xFF, 0xFF, 0xFD, 0xED, 0x23, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 }, { 0xE0, 0x9F, 0xF9, 0xFF, 0xFF, 0xFD, 0xCD, 0x23, 0x00, 0x00, 0x00, 0xB0, 0x03, 0x00, 0x00, 0x00, 0xE0, 0xC7, 0x3D, 0xD6, 0x18, 0xC7, 0xBF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xE0, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xEF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0xE0, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xEF, 0x03, 0x00, 0x00, 0x00, 0x40, 0x03, 0x00, 0x00, 0x00 }, { 0xE0, 0xDF, 0xFD, 0xFF, 0xFF, 0xFD, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x0D, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x96, 0x25, 0xF0, 0xFE, 0xAE, 0x6C, 0x0D, 0x20, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFE, 0xFF, 0xFF, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 
0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x00 }, { 0xED, 0xDA, 0x07, 0x00, 0x00, 0x00, 0x00, 0x50, 0x01, 0x50, 0x31, 0x82, 0xAB, 0x62, 0x2C, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0xC9, 0x80, 0xF5, 0x07, 0x00, 0x00, 0x00, 0x00, 0x08, 0x01, 0x02 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x03 }, { 0xFF, 0xFF, 0x3F, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x3F, 0xFF, 0xAA, 0xFF, 0xFF, 0xFF, 0x3F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xDF, 0x5F, 0xDC, 0x1F, 0xCF, 0x0F, 0xFF, 0x1F, 0xDC, 0x1F }, { 0x00, 0x00, 0x00, 0x00, 0x40, 0x4C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07 }, { 0xE0, 0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x80, 0x00, 0x00, 0x00, 0xFE, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFB, 0xFF, 0xFF, 0xBB, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF8, 0x07, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xFF, 0x9F, 0x3D, 0x00, 0x00 }, { 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0xFF, 0x3F, 0x1E, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0x9F, 0x39, 0x80, 0x00, 0x0C, 0x00, 0x00, 0x00 }, { 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0x87, 0x39, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0xBF, 0x3B, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xD0, 0x8F, 0x39, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xC7, 0x3D, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x0E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xDF, 0x3D, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xDF, 0x3D, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x0C, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC0, 0xCF, 0x3D, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF2, 0x07, 0x80, 0x7F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF2, 0x1B, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0xA0, 0xC2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xDF, 0x0F, 0xBF, 0xFE, 0xFF, 0x3F, 0xFE, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x1F, 0x02, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0xFC, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3E, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x70 }, { 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0x7F, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xBF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, { 0x00, 0x30, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00 }, { 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F }, { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x20, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x24, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x84, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x40, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x24, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, { 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, }; static unsigned char lexer_blockmaps[6][256] = { { 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13, 0x14, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x15, 0x16, 0x0B, 0x17, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 
0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x18, 0x19, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B }, { 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x1C, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 
0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1D, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B }, { 0x0B, 0x0B, 0x0B, 0x20, 0x21, 0x22, 0x23, 0x0B, 0x0B, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x2B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x2C, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 
0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B }, { 0x2D, 0x0B, 0x2E, 0x2F, 0x0B, 0x0B, 0x30, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x31, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x32, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B }, { 0x34, 0x1A, 0x1A, 0x35, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 
0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x36, 0x37, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x1A, 0x1A, 0x1A, 0x38, 0x39, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x1A, 0x3A }, { 0x2D, 0x0B, 0x0B, 0x3B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 
0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B, 0x0B }, }; static const Py_UCS4 lexer_INITIAL_pattern_0[] = { 6, 39, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_1[] = { 6, 39, 6, 39, 6, 39, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_2[] = { 6, 34, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_3[] = { 6, 34, 6, 34, 6, 34, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_4[] = { 6, 60, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_5[] = { 6, 64, 12, 8, 1, 8, 4, 3, 2, 0, 1, 12, 14, 0, 6, 45, 12, 8, 1, 8, 4, 3, 3, 0, 1, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_6[] = { 6, 40, 12, 8, 0, 8, 4, 3, 1, 0, 1, 6, 41, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_7[] = { 6, 91, 12, 8, 0, 8, 4, 3, 1, 0, 1, 6, 93, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_8[] = { 13, 80, 0, 1, 11, 4, 6, 95, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 0, 12, 51, 0, 11, 7, 8, 4, 3, 30, 0, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 7, 8, 4, 4, 2, 0, 1, 7, 8, 4, 4, 3, 0, 1, 0, 1, 1, 6, 58, 11, 4, 6, 
/* BisonGen-generated lexer tables (DO NOT EDIT -- see the generator banner at the top of this file): each lexer_*_pattern_N array is a serialized regex program interpreted by lexer_match(); the opcode encoding is internal to the BisonGen runtime and is not documented here. This line continues an initializer opened in an earlier part of the file. */ 95, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 0, 12, 51, 0, 11, 7, 8, 4, 3, 30, 0, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 7, 8, 4, 4, 2, 0, 1, 7, 8, 4, 4, 3, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_9[] = { 13, 80, 0, 1, 11, 4, 6, 95, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 0, 12, 51, 0, 11, 7, 8, 4, 3, 30, 0, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 7, 8, 4, 4, 2, 0, 1, 7, 8, 4, 4, 3, 0, 1, 0, 1, 1, 6, 58, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_10[] = { 6, 95, 6, 58, 11, 4, 6, 95, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 0, 12, 51, 0, 11, 7, 8, 4, 3, 30, 0, 1, 18, 11, 7, 8, 4, 4, 0, 0, 1, 7, 8, 4, 4, 1, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 7, 8, 4, 4, 2, 0, 1, 7, 8, 4, 4, 3, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_11[] = { 8, 4, 3, 51, 0, 11, 15, 11, 4, 6, 95, 1, 7, 8, 4, 4, 4, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 0, 12, 34, 0, 11, 7, 8, 4, 4, 5, 0, 1, 15, 11, 4, 6, 95, 1, 7, 8, 4, 4, 4, 0, 1, 0, 1, 7, 8, 4, 3, 31, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_12[] = { 6, 64, 12, 8, 1, 8, 4, 3, 2, 0, 1, 12, 14, 0, 6, 45, 12, 8, 1, 8, 4, 3, 3, 0, 1, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_13[] = { 12, 8, 1, 8, 4, 3, 31, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_14[] = { 11, 22, 12, 8, 1, 8, 4, 3, 31, 0, 1, 6, 46, 12, 8, 0, 8, 4, 3, 31, 0, 1, 1, 13, 6, 46, 12, 8, 1, 8, 4, 3, 31, 0, 1, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_15[] = { 11, 46, 12, 8, 1, 8, 4, 3, 31, 0, 1, 6, 46, 12, 8, 0, 8, 4, 3, 31, 0, 1, 8, 4, 3, 60, 0, 13, 9, 0, 1, 8, 4, 3, 61, 0, 1, 12, 8, 1, 8, 4, 3, 31, 0, 1, 1, 37, 6, 46, 12, 8, 1, 8, 4, 3, 31, 0, 1, 8, 4, 3, 60, 0, 13, 9, 0, 1, 8, 4, 3, 61, 0, 1, 12, 8, 1, 8, 4, 3, 31, 0, 1, 1, 35, 12, 8, 1, 8, 4, 3, 31, 0, 1, 8, 4, 3, 60, 0, 13, 9, 0, 1, 8, 4, 3, 61, 0, 1, 12, 8, 1, 8, 4, 3, 31, 0, 1, 1, 0, 1 }; 
/* Comment/name patterns, then SPARQL keyword patterns: runs of (6, <codepoint>) pairs spell literal characters, e.g. pattern_18 below encodes "union" | "UNION". */ static const Py_UCS4 lexer_INITIAL_pattern_16[] = { 6, 35, 12, 8, 0, 9, 4, 3, 62, 0, 1, 13, 19, 0, 1, 11, 7, 8, 4, 3, 62, 0, 1, 6, 6, 13, 6, 10, 1, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_17[] = { 12, 8, 1, 8, 4, 3, 1, 0, 1, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_18[] = { 11, 12, 6, 117, 6, 110, 6, 105, 6, 111, 6, 110, 1, 12, 6, 85, 6, 78, 6, 73, 6, 79, 6, 78, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_19[] = { 6, 42, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_20[] = { 11, 10, 6, 98, 6, 97, 6, 115, 6, 101, 1, 10, 6, 66, 6, 65, 6, 83, 6, 69, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_21[] = { 11, 14, 6, 112, 6, 114, 6, 101, 6, 102, 6, 105, 6, 120, 1, 14, 6, 80, 6, 82, 6, 69, 6, 70, 6, 73, 6, 88, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_22[] = { 11, 14, 6, 115, 6, 101, 6, 108, 6, 101, 6, 99, 6, 116, 1, 14, 6, 83, 6, 69, 6, 76, 6, 69, 6, 67, 6, 84, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_23[] = { 11, 18, 6, 100, 6, 105, 6, 115, 6, 116, 6, 105, 6, 110, 6, 99, 6, 116, 1, 18, 6, 68, 6, 73, 6, 83, 6, 84, 6, 73, 6, 78, 6, 67, 6, 84, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_24[] = { 11, 10, 6, 102, 6, 114, 6, 111, 6, 109, 1, 10, 6, 70, 6, 82, 6, 79, 6, 77, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_25[] = { 11, 12, 6, 110, 6, 97, 6, 109, 6, 101, 6, 100, 1, 12, 6, 78, 6, 65, 6, 77, 6, 69, 6, 68, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_26[] = { 11, 18, 6, 111, 6, 112, 6, 116, 6, 105, 6, 111, 6, 110, 6, 97, 6, 108, 1, 18, 6, 79, 6, 80, 6, 84, 6, 73, 6, 79, 6, 78, 6, 65, 6, 76, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_27[] = { 11, 14, 6, 102, 6, 105, 6, 108, 6, 116, 6, 101, 6, 114, 1, 14, 6, 70, 6, 73, 6, 76, 6, 84, 6, 69, 6, 82, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_28[] = { 11, 12, 6, 103, 6, 114, 6, 97, 6, 112, 6, 104, 1, 12, 6, 71, 6, 82, 6, 65, 6, 80, 6, 72, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_29[] = { 11, 12, 6, 
/* More keyword patterns: where, order, by, asc, ASK, CONSTRUCT, DESCRIBE, desc, limit, offset, str, lang, langMatches, datatype (each in lower/upper variants where applicable). */ 119, 6, 104, 6, 101, 6, 114, 6, 101, 1, 12, 6, 87, 6, 72, 6, 69, 6, 82, 6, 69, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_30[] = { 11, 12, 6, 111, 6, 114, 6, 100, 6, 101, 6, 114, 1, 12, 6, 79, 6, 82, 6, 68, 6, 69, 6, 82, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_31[] = { 11, 6, 6, 98, 6, 121, 1, 6, 6, 66, 6, 89, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_32[] = { 11, 8, 6, 97, 6, 115, 6, 99, 1, 8, 6, 65, 6, 83, 6, 67, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_33[] = { 6, 65, 6, 83, 6, 75, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_34[] = { 6, 67, 6, 79, 6, 78, 6, 83, 6, 84, 6, 82, 6, 85, 6, 67, 6, 84, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_35[] = { 6, 68, 6, 69, 6, 83, 6, 67, 6, 82, 6, 73, 6, 66, 6, 69, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_36[] = { 11, 10, 6, 100, 6, 101, 6, 115, 6, 99, 1, 10, 6, 68, 6, 69, 6, 83, 6, 67, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_37[] = { 11, 12, 6, 108, 6, 105, 6, 109, 6, 105, 6, 116, 1, 12, 6, 76, 6, 73, 6, 77, 6, 73, 6, 84, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_38[] = { 11, 14, 6, 111, 6, 102, 6, 102, 6, 115, 6, 101, 6, 116, 1, 14, 6, 79, 6, 70, 6, 70, 6, 83, 6, 69, 6, 84, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_39[] = { 11, 8, 6, 115, 6, 116, 6, 114, 1, 8, 6, 83, 6, 84, 6, 82, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_40[] = { 11, 10, 6, 108, 6, 97, 6, 110, 6, 103, 1, 10, 6, 76, 6, 65, 6, 78, 6, 71, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_41[] = { 11, 24, 6, 108, 6, 97, 6, 110, 6, 103, 6, 77, 6, 97, 6, 116, 6, 99, 6, 104, 6, 101, 6, 115, 1, 24, 6, 76, 6, 65, 6, 78, 6, 71, 6, 77, 6, 65, 6, 84, 6, 67, 6, 72, 6, 69, 6, 83, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_42[] = { 11, 18, 6, 100, 6, 97, 6, 116, 6, 97, 6, 116, 6, 121, 6, 112, 6, 101, 1, 18, 6, 68, 6, 65, 6, 84, 6, 65, 6, 84, 6, 89, 6, 80, 6, 69, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_43[] = { 6, 105, 6, 115, 6, 
/* Built-in test keywords (isIRI, isURI, isBlank, isLiteral, bound, regex, true, false, the 'a' predicate shorthand) followed by one- and two-character operator patterns ('-', '+', '&&', '||', '!', '^^', ',', ':', '?', '$', '/', '(', ')'). */ 73, 6, 82, 6, 73, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_44[] = { 11, 12, 6, 105, 6, 115, 6, 85, 6, 114, 6, 105, 1, 12, 6, 105, 6, 115, 6, 85, 6, 82, 6, 73, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_45[] = { 11, 16, 6, 105, 6, 115, 6, 66, 6, 108, 6, 97, 6, 110, 6, 107, 1, 16, 6, 105, 6, 115, 6, 66, 6, 76, 6, 65, 6, 78, 6, 75, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_46[] = { 11, 20, 6, 105, 6, 115, 6, 76, 6, 105, 6, 116, 6, 101, 6, 114, 6, 97, 6, 108, 1, 20, 6, 105, 6, 115, 6, 76, 6, 73, 6, 84, 6, 69, 6, 82, 6, 65, 6, 76, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_47[] = { 11, 12, 6, 98, 6, 111, 6, 117, 6, 110, 6, 100, 1, 12, 6, 66, 6, 79, 6, 85, 6, 78, 6, 68, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_48[] = { 11, 12, 6, 114, 6, 101, 6, 103, 6, 101, 6, 120, 1, 12, 6, 82, 6, 69, 6, 71, 6, 69, 6, 88, 1, 0, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_49[] = { 6, 116, 6, 114, 6, 117, 6, 101, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_50[] = { 6, 102, 6, 97, 6, 108, 6, 115, 6, 101, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_51[] = { 6, 97, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_52[] = { 6, 45, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_53[] = { 6, 43, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_54[] = { 6, 38, 6, 38, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_55[] = { 6, 124, 6, 124, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_56[] = { 6, 33, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_57[] = { 6, 94, 6, 94, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_58[] = { 6, 44, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_59[] = { 6, 58, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_60[] = { 6, 63, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_61[] = { 6, 36, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_62[] = { 6, 47, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_63[] = { 6, 40, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_64[] = { 6, 41, 1 }; static const 
/* Remaining punctuation patterns ('[', ']', '=', '!=', '<', '>', '<=', '>=', ';', '{', '}', '.'), then the NULL-terminated table mapping pattern index -> program for the INITIAL lexical state. */ Py_UCS4 lexer_INITIAL_pattern_65[] = { 6, 91, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_66[] = { 6, 93, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_67[] = { 6, 61, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_68[] = { 6, 33, 6, 61, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_69[] = { 6, 60, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_70[] = { 6, 62, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_71[] = { 6, 60, 6, 61, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_72[] = { 6, 62, 6, 61, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_73[] = { 6, 59, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_74[] = { 6, 123, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_75[] = { 6, 125, 1 }; static const Py_UCS4 lexer_INITIAL_pattern_76[] = { 6, 46, 1 }; static const Py_UCS4 *lexer_INITIAL_patterns[] = { lexer_INITIAL_pattern_0, lexer_INITIAL_pattern_1, lexer_INITIAL_pattern_2, lexer_INITIAL_pattern_3, lexer_INITIAL_pattern_4, lexer_INITIAL_pattern_5, lexer_INITIAL_pattern_6, lexer_INITIAL_pattern_7, lexer_INITIAL_pattern_8, lexer_INITIAL_pattern_9, lexer_INITIAL_pattern_10, lexer_INITIAL_pattern_11, lexer_INITIAL_pattern_12, lexer_INITIAL_pattern_13, lexer_INITIAL_pattern_14, lexer_INITIAL_pattern_15, lexer_INITIAL_pattern_16, lexer_INITIAL_pattern_17, lexer_INITIAL_pattern_18, lexer_INITIAL_pattern_19, lexer_INITIAL_pattern_20, lexer_INITIAL_pattern_21, lexer_INITIAL_pattern_22, lexer_INITIAL_pattern_23, lexer_INITIAL_pattern_24, lexer_INITIAL_pattern_25, lexer_INITIAL_pattern_26, lexer_INITIAL_pattern_27, lexer_INITIAL_pattern_28, lexer_INITIAL_pattern_29, lexer_INITIAL_pattern_30, lexer_INITIAL_pattern_31, lexer_INITIAL_pattern_32, lexer_INITIAL_pattern_33, lexer_INITIAL_pattern_34, lexer_INITIAL_pattern_35, lexer_INITIAL_pattern_36, lexer_INITIAL_pattern_37, lexer_INITIAL_pattern_38, lexer_INITIAL_pattern_39, lexer_INITIAL_pattern_40, lexer_INITIAL_pattern_41, lexer_INITIAL_pattern_42, lexer_INITIAL_pattern_43, lexer_INITIAL_pattern_44, 
/* Dispatch table continues; then pattern programs for the string-literal lexical states (double-quoted short/long strings, then single-quoted long). Each state offers a "string body" pattern and a closing-delimiter pattern. */ lexer_INITIAL_pattern_45, lexer_INITIAL_pattern_46, lexer_INITIAL_pattern_47, lexer_INITIAL_pattern_48, lexer_INITIAL_pattern_49, lexer_INITIAL_pattern_50, lexer_INITIAL_pattern_51, lexer_INITIAL_pattern_52, lexer_INITIAL_pattern_53, lexer_INITIAL_pattern_54, lexer_INITIAL_pattern_55, lexer_INITIAL_pattern_56, lexer_INITIAL_pattern_57, lexer_INITIAL_pattern_58, lexer_INITIAL_pattern_59, lexer_INITIAL_pattern_60, lexer_INITIAL_pattern_61, lexer_INITIAL_pattern_62, lexer_INITIAL_pattern_63, lexer_INITIAL_pattern_64, lexer_INITIAL_pattern_65, lexer_INITIAL_pattern_66, lexer_INITIAL_pattern_67, lexer_INITIAL_pattern_68, lexer_INITIAL_pattern_69, lexer_INITIAL_pattern_70, lexer_INITIAL_pattern_71, lexer_INITIAL_pattern_72, lexer_INITIAL_pattern_73, lexer_INITIAL_pattern_74, lexer_INITIAL_pattern_75, lexer_INITIAL_pattern_76, NULL }; static const Py_UCS4 lexer_STRING_MODE_SHORT_2_pattern_0[] = { 12, 21, 0, 11, 7, 9, 4, 3, 63, 0, 1, 9, 6, 92, 8, 4, 3, 64, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_STRING_MODE_SHORT_2_pattern_1[] = { 6, 34, 1 }; static const Py_UCS4 *lexer_STRING_MODE_SHORT_2_patterns[] = { lexer_STRING_MODE_SHORT_2_pattern_0, lexer_STRING_MODE_SHORT_2_pattern_1, NULL }; static const Py_UCS4 lexer_STRING_MODE_LONG_2_pattern_0[] = { 12, 28, 0, 13, 6, 0, 2, 6, 34, 1, 11, 7, 9, 4, 3, 66, 0, 1, 9, 6, 92, 8, 4, 3, 64, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_STRING_MODE_LONG_2_pattern_1[] = { 6, 34, 6, 34, 6, 34, 1 }; static const Py_UCS4 *lexer_STRING_MODE_LONG_2_patterns[] = { lexer_STRING_MODE_LONG_2_pattern_0, lexer_STRING_MODE_LONG_2_pattern_1, NULL }; static const Py_UCS4 lexer_STRING_MODE_LONG_1_pattern_0[] = { 12, 28, 0, 6, 39, 8, 4, 3, 67, 0, 11, 7, 9, 4, 3, 68, 0, 1, 9, 6, 92, 8, 4, 3, 64, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_STRING_MODE_LONG_1_pattern_1[] = { 6, 39, 6, 39, 6, 39, 1 }; static const Py_UCS4 *lexer_STRING_MODE_LONG_1_patterns[] = { lexer_STRING_MODE_LONG_1_pattern_0, lexer_STRING_MODE_LONG_1_pattern_1, NULL }; static const 
/* Single-quoted short-string and IRI-mode patterns, then the per-state dispatch tables: lexer_patterns[state] / lexer_actions[state] are indexed by the lexer's 'state' field -- the leading NULL leaves slot 0 empty. lexer_*_actions maps the matched pattern index to a parser action number. The trailing #if guards against pre-2.2 or non-unicode Python builds. */ Py_UCS4 lexer_STRING_MODE_SHORT_1_pattern_0[] = { 12, 21, 0, 11, 7, 9, 4, 3, 65, 0, 1, 9, 6, 92, 8, 4, 3, 64, 0, 1, 0, 1, 1 }; static const Py_UCS4 lexer_STRING_MODE_SHORT_1_pattern_1[] = { 6, 39, 1 }; static const Py_UCS4 *lexer_STRING_MODE_SHORT_1_patterns[] = { lexer_STRING_MODE_SHORT_1_pattern_0, lexer_STRING_MODE_SHORT_1_pattern_1, NULL }; static const Py_UCS4 lexer_IRI_MODE_pattern_0[] = { 6, 62, 1 }; static const Py_UCS4 lexer_IRI_MODE_pattern_1[] = { 12, 8, 0, 9, 4, 3, 0, 0, 1, 1 }; static const Py_UCS4 lexer_IRI_MODE_pattern_2[] = { 11, 4, 6, 61, 1, 11, 12, 8, 1, 8, 4, 3, 1, 0, 1, 1, 0, 1 }; static const Py_UCS4 *lexer_IRI_MODE_patterns[] = { lexer_IRI_MODE_pattern_0, lexer_IRI_MODE_pattern_1, lexer_IRI_MODE_pattern_2, NULL }; static const Py_UCS4 **lexer_patterns[] = { NULL, lexer_INITIAL_patterns, lexer_STRING_MODE_SHORT_2_patterns, lexer_STRING_MODE_LONG_2_patterns, lexer_STRING_MODE_LONG_1_patterns, lexer_STRING_MODE_SHORT_1_patterns, lexer_IRI_MODE_patterns }; static const int lexer_INITIAL_actions[] = { 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79 }; static const int lexer_STRING_MODE_SHORT_2_actions[] = { 80, 81 }; static const int lexer_STRING_MODE_LONG_2_actions[] = { 84, 85 }; static const int lexer_STRING_MODE_LONG_1_actions[] = { 86, 87 }; static const int lexer_STRING_MODE_SHORT_1_actions[] = { 82, 83 }; static const int lexer_IRI_MODE_actions[] = { 0, 1, 2 }; static const int *lexer_actions[] = { NULL, lexer_INITIAL_actions, lexer_STRING_MODE_SHORT_2_actions, lexer_STRING_MODE_LONG_2_actions, lexer_STRING_MODE_LONG_1_actions, lexer_STRING_MODE_SHORT_1_actions, lexer_IRI_MODE_actions }; #if PY_VERSION_HEX < 0x02020000 || !defined(Py_USING_UNICODE) #error "Python 2.2+ with unicode support 
required" #endif /* Static Definitions */ #define YYEMPTY -2 #define YYERROR -1 #define YYEOF 0 #define YYINITDEPTH 1000 #define LEXER_INITIAL_BACKTRACKS 20 /* Parsing objects */ typedef struct { PyObject_HEAD int verbose; PyObject *dict; } parserobject; typedef struct { PyObject *text; int last; int state; Py_UNICODE *end; Py_UNICODE *position; /* backtracking stack */ int backtracks; Py_UNICODE **positions; int allocated; } lexerobject; static int parser_yylex(parserobject *, lexerobject *, PyObject **); static lexerobject *lexer_new(PyObject *); static void lexer_free(lexerobject *); static int lexer_save_position(lexerobject *); static Py_UNICODE *lexer_restore_position(lexerobject *); static int lexer_charset(parserobject *, Py_UCS4 *, Py_UCS4, int); static int lexer_match(parserobject *, lexerobject *, Py_UCS4 *); static void lexer_error(lexerobject *); static char *unicode_escape(Py_UNICODE *, int); static PyObject *report_error(int state, PyObject *lval, lexerobject *lexer); static void print_reduce(int ruleno); static void print_state_stack(int *stack, int *end); /* Parser Methods */ /* Attempt to use C99 variable argument macros for improved error detection * (just in case). */ #ifdef __STDC__ /* C99 conformance macro */ #define TRACE(...) if (self->verbose > 0) PySys_WriteStderr(__VA_ARGS__) #define REGEX_TRACE(...) 
if (self->verbose > 1) PySys_WriteStderr(__VA_ARGS__) #else #define TRACE if (self->verbose > 0) PySys_WriteStderr #define REGEX_TRACE if (self->verbose > 1) PySys_WriteStderr #endif static char parse_doc[] = "\ parse(string) -> object\n\ Converts the given string to a parse tree and return the top-most\n\ element of the tree."; static PyObject* parser_parse(register parserobject *self, PyObject *text) { register int yystate; register int yyn; PyObject *yylval = NULL; PyObject *yyval = NULL; int state_stack[YYINITDEPTH]; int *state_ptr; PyObject *value_stack[YYINITDEPTH]; PyObject **value_ptr; int yylen; int yychar = YYEMPTY; /* cause a token to be read */ int yychar1 = 0; lexerobject *lexer; lexer = lexer_new(text); if (lexer == NULL) return NULL; TRACE("Starting parse\n"); /* Initialize stack pointers Waste one element of value and location stack so that they stay on the same level as the state stack. The wasted elements are never initialized. */ state_ptr = state_stack - 1; value_ptr = value_stack; yystate = 0; while (1) { /* Push a new state, which is found in yystate. */ /* In all cases, when you get here, the value and location stacks have just been pushed. So pushing a state here evens the stacks. */ *++state_ptr = yystate; TRACE("Entering state %d\n", yystate); /* Do appropriate processing given the current state. */ /* Read a lookahead token if we need one and don't already have one. */ /* First try to decide what to do without reference to lookahead token. */ yyn = action_idx[yystate]; if (yyn == YYFLAG) { yyn = default_action[yystate]; if (yyn == 0) { return report_error(yystate, yylval, lexer); } /* Do a reduction. yyn is the number of a rule to reduce with. 
*/ if (self->verbose) print_reduce(yyn); yylen = rhs_size[yyn]; state_ptr -= yylen; value_ptr -= yylen; if (yylen > 0) yyval = value_ptr[1]; /* Action routines */ switch (yyn) { case 1: { /* from SPARQL.bgen, line 41 * Query: Prolog QueryTypes */ yyval = PyObject_CallMethod(Query, "Query", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Query("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 2: { /* from SPARQL.bgen, line 52 * QueryTypes: SelectQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 5: { /* from SPARQL.bgen, line 65 * QueryTypes: AskQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 6: { /* from SPARQL.bgen, line 78 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 7: { /* from SPARQL.bgen, line 
88 * DescribeQuery: DESCRIBE VAR_REFERENCES SolutionModifier */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,Py_None,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 8: { /* from SPARQL.bgen, line 98 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],Py_None,value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 9: { /* from SPARQL.bgen, line 108 * DescribeQuery: DESCRIBE VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); 
break; } case 10: { /* from SPARQL.bgen, line 123 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],value_ptr[5],value_ptr[6],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); Py_DECREF(value_ptr[7]); break; } case 11: { /* from SPARQL.bgen, line 135 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],Py_None,value_ptr[5],value_ptr[6]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 12: { /* from 
SPARQL.bgen, line 152 * Prolog: BaseDecl PrefixDeclList */ yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 13: { /* from SPARQL.bgen, line 159 * Prolog: BaseDecl */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], Py_None); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 14: { /* from SPARQL.bgen, line 166 * Prolog: PrefixDeclList */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", Py_None, value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 15: { /* from SPARQL.bgen, line 173 * Prolog: */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--Prolog("); fprintf(stderr, ")\n"); } break; } case 16: { /* from SPARQL.bgen, line 192 * PrefixDeclList: PrefixDecl */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 17: { /* from SPARQL.bgen, line 200 * PrefixDeclList: PrefixDeclList PrefixDecl */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 18: { 
/* from SPARQL.bgen, line 216 * PrefixDecl: PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 19: { /* from SPARQL.bgen, line 226 * PrefixDecl: PREFIX PNAME_NS LESS_THAN GREATER_THAN */ PyObject *t = PyObject_GetAttrString(Bindings, "EMPTY_STRING"); yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 20: { /* from SPARQL.bgen, line 242 * BaseDecl: BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "BaseDeclaration", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BaseDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 21: { /* from SPARQL.bgen, line 256 * AskQuery: ASK WhereClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "AskQuery", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 22: { /* from SPARQL.bgen, line 264 * AskQuery: ASK DataSetClauseList WhereClause */ yyval = PyObject_CallMethod(Query, "AskQuery", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 23: { /* from SPARQL.bgen, line 277 * SelectQuery: SELECT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 24: { /* from SPARQL.bgen, line 287 * SelectQuery: SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", 
value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 25: { /* from SPARQL.bgen, line 297 * SelectQuery: SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],Py_None,value_ptr[4],value_ptr[5],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 26: { /* from SPARQL.bgen, line 308 * SelectQuery: SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],value_ptr[4],value_ptr[5],value_ptr[6],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 28: { /* from SPARQL.bgen, line 327 * VAR_REFERENCES: ASTERISK */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--VAR_REFERENCES("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 29: { /* from SPARQL.bgen, line 337 * VariableReferenceList: Var */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 30: { /* from SPARQL.bgen, line 345 * VariableReferenceList: VariableReferenceList Var */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 31: { /* from SPARQL.bgen, line 361 * IRIref: LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(IRIRef, "IRIRef", "O",value_ptr[2]); if (self->verbose) { fprintf(stderr, "--IRIref("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 33: { /* from SPARQL.bgen, 
line 384 * PrefixedName: PNAME_NS */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 34: { /* from SPARQL.bgen, line 390 * PrefixedName: PNAME_LN */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 35: { /* from SPARQL.bgen, line 400 * DataSetClauseList: DataSetClause */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 36: { /* from SPARQL.bgen, line 408 * DataSetClauseList: DataSetClauseList DataSetClause */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 37: { /* from SPARQL.bgen, line 422 * DataSetClause: FROM IRIref */ yyval = PyObject_CallMethod(IRIRef, "RemoteGraph", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 38: { /* from SPARQL.bgen, line 429 * DataSetClause: FROM NAMED IRIref */ yyval = PyObject_CallMethod(IRIRef, "NamedGraph", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 39: { /* from SPARQL.bgen, line 442 * WhereClause: WHERE GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 40: { /* from SPARQL.bgen, line 449 * WhereClause: GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 41: { /* from SPARQL.bgen, line 461 * SolutionModifier: */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", ""); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); fprintf(stderr, ")\n"); } break; } case 42: { /* from SPARQL.bgen, line 466 * SolutionModifier: OrderClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 43: { /* from SPARQL.bgen, line 472 * SolutionModifier: OrderClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OO",value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 44: { /* from SPARQL.bgen, line 479 * SolutionModifier: OrderClause LimitClause OffsetClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 45: { /* from SPARQL.bgen, line 487 * SolutionModifier: OrderClause OffsetClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 46: { /* from SPARQL.bgen, line 495 * SolutionModifier: LimitClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 47: { /* from SPARQL.bgen, line 503 * SolutionModifier: OrderClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],Py_None,value_ptr[2]); if (self->verbose) { 
fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 48: { /* from SPARQL.bgen, line 511 * SolutionModifier: OffsetClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,Py_None,value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 49: { /* from SPARQL.bgen, line 519 * SolutionModifier: LimitClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],Py_None); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 50: { /* from SPARQL.bgen, line 532 * OrderClause: ORDER BY OrderConditionList */ yyval = value_ptr[3]; Py_INCREF(value_ptr[3]); if (self->verbose) { fprintf(stderr, "--OrderClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 51: { /* from SPARQL.bgen, line 545 * OrderConditionList: OrderCondition */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 52: { /* from SPARQL.bgen, line 553 * OrderConditionList: OrderConditionList OrderCondition */ 
PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 53: { /* from SPARQL.bgen, line 569 * OrderCondition: ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "ASCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 54: { /* from SPARQL.bgen, line 580 * OrderCondition: DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "DESCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 57: { /* from SPARQL.bgen, line 597 * OrderCondition: LEFT_PAREN 
ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "UNSPECIFIED_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[2],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 59: { /* from SPARQL.bgen, line 615 * LimitClause: LIMIT NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--LimitClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 60: { /* from SPARQL.bgen, line 628 * OffsetClause: OFFSET NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--OffsetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 61: { /* from SPARQL.bgen, line 645 * GroupGraphPattern: LEFT_CURLY RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 62: { /* from SPARQL.bgen, line 653 * GroupGraphPattern: LEFT_CURLY Triples GraphPatternList RIGHT_CURLY */ yyval = 
PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 63: { /* from SPARQL.bgen, line 662 * GroupGraphPattern: LEFT_CURLY Triples RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 64: { /* from SPARQL.bgen, line 671 * GroupGraphPattern: LEFT_CURLY GraphPatternList RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 65: { /* from SPARQL.bgen, line 684 * GraphPatternList: GraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphPatternList("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 66: { /* from SPARQL.bgen, line 692 * GraphPatternList: GraphPatternList GraphPattern */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 67: { /* from SPARQL.bgen, line 739 * GraphPattern: Filter Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 68: { /* from SPARQL.bgen, line 747 * GraphPattern: Filter DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 69: { /* from SPARQL.bgen, line 756 * GraphPattern: Filter DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 70: { /* from SPARQL.bgen, line 764 * GraphPattern: Filter */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 71: { /* from SPARQL.bgen, line 771 * GraphPattern: GraphPatternNotTriples Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 72: { /* from SPARQL.bgen, line 779 * GraphPattern: GraphPatternNotTriples DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 73: { /* from SPARQL.bgen, line 788 * GraphPattern: GraphPatternNotTriples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 74: { /* from SPARQL.bgen, line 795 * GraphPattern: GraphPatternNotTriples DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", 
"OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 75: { /* from SPARQL.bgen, line 847 * GraphPatternNotTriples: OPTIONAL GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedOptionalGraphPattern", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 76: { /* from SPARQL.bgen, line 854 * GraphPatternNotTriples: GroupGraphPattern */ yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 77: { /* from SPARQL.bgen, line 860 * GraphPatternNotTriples: GroupGraphPattern AlternativeGroupGraphPatterns */ yyval = PyObject_CallMethod(GraphPattern, "ParsedAlternativeGraphPattern", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 78: { /* from SPARQL.bgen, line 867 * GraphPatternNotTriples: GRAPH Var GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 79: { /* from SPARQL.bgen, line 875 * GraphPatternNotTriples: GRAPH BlankNode GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 80: { /* from SPARQL.bgen, line 883 * GraphPatternNotTriples: GRAPH IRIref GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 81: { /* from SPARQL.bgen, line 898 * AlternativeGroupGraphPatterns: UNION GroupGraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 82: { /* from SPARQL.bgen, line 907 * AlternativeGroupGraphPatterns: AlternativeGroupGraphPatterns UNION 
GroupGraphPattern */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 84: { /* from SPARQL.bgen, line 934 * ConditionalOrExpression: ConditionalAndExpression ConditionalAndExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedConditionalAndExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalOrExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 85: { /* from SPARQL.bgen, line 946 * ConditionalAndExpressionList: DOUBLE_PIPE ConditionalAndExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 86: { /* from SPARQL.bgen, line 955 * ConditionalAndExpressionList: ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", 
"); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 87: { /* from SPARQL.bgen, line 970 * ConditionalAndExpression: RelationalExpression ValueLogicalList */ yyval = PyObject_CallMethod(Expression, "ParsedRelationalExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 89: { /* from SPARQL.bgen, line 987 * ValueLogicalList: DOUBLE_AMPERSAND RelationalExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 90: { /* from SPARQL.bgen, line 996 * ValueLogicalList: ValueLogicalList DOUBLE_AMPERSAND RelationalExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 92: { /* from SPARQL.bgen, line 1024 * RelationalExpression: AdditiveExpression EQUALITY_OP AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "EqualityOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { 
fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 93: { /* from SPARQL.bgen, line 1032 * RelationalExpression: AdditiveExpression NOT_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "NotEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 94: { /* from SPARQL.bgen, line 1040 * RelationalExpression: AdditiveExpression LESS_THAN AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "LessThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 95: { /* from SPARQL.bgen, line 1048 * RelationalExpression: AdditiveExpression GREATER_THAN AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, 
Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 96: { /* from SPARQL.bgen, line 1056 * RelationalExpression: AdditiveExpression LESS_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "LessThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 97: { /* from SPARQL.bgen, line 1064 * RelationalExpression: AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 99: { /* from SPARQL.bgen, line 1083 * AdditiveExpression: MultiplicativeExpression MultiplicativeExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedAdditiveExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); /*yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2]);*/ if (self->verbose) { fprintf(stderr, "--AdditiveExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 100: { /* from SPARQL.bgen, 
line 1095 * MultiplicativeExpressionList: PLUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 101: { /* from SPARQL.bgen, line 1105 * MultiplicativeExpressionList: MINUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 102: { /* from SPARQL.bgen, line 1115 * MultiplicativeExpressionList: MultiplicativeExpressionList MINUS MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 103: { /* from SPARQL.bgen, line 1125 * MultiplicativeExpressionList: MultiplicativeExpressionList PLUS 
MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 105: { /* from SPARQL.bgen, line 1146 * MultiplicativeExpression: UnaryExpression UnaryExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedMultiplicativeExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 106: { /* from SPARQL.bgen, line 1157 * UnaryExpressionList: ASTERISK UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 107: { /* from SPARQL.bgen, line 1166 * UnaryExpressionList: FORWARDSLASH UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 108: { /* from SPARQL.bgen, line 1175 * UnaryExpressionList: UnaryExpressionList ASTERISK UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 109: { /* from SPARQL.bgen, line 1185 * UnaryExpressionList: UnaryExpressionList FORWARDSLASH UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 110: { /* from SPARQL.bgen, line 1200 * UnaryExpression: BANG PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "LogicalNegation", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 111: { /* from SPARQL.bgen, line 1207 * UnaryExpression: PLUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericPositive", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 112: { /* from SPARQL.bgen, line 1214 * UnaryExpression: MINUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericNegative", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 114: { /* from SPARQL.bgen, line 1241 * BuiltInCall: STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "STR"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 115: { /* from SPARQL.bgen, line 1251 * BuiltInCall: LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANG"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); 
Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 116: { /* from SPARQL.bgen, line 1261 * BuiltInCall: LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANGMATCHES"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OOO", funcName,value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 117: { /* from SPARQL.bgen, line 1273 * BuiltInCall: DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "DATATYPE"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 118: { /* from SPARQL.bgen, line 1283 * BuiltInCall: BOUND LEFT_PAREN Var RIGHT_PAREN */ PyObject *funcName = 
PyObject_GetAttrString(FunctionLibrary, "BOUND"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 119: { /* from SPARQL.bgen, line 1293 * BuiltInCall: isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isIRI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 120: { /* from SPARQL.bgen, line 1303 * BuiltInCall: isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isURI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, 
")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 121: { /* from SPARQL.bgen, line 1313 * BuiltInCall: isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isBLANK"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 122: { /* from SPARQL.bgen, line 1323 * BuiltInCall: isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isLITERAL"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 124: { /* from SPARQL.bgen, line 1341 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OO", value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); 
fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 125: { /* from SPARQL.bgen, line 1352 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OOO", value_ptr[3],value_ptr[5],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[8], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); Py_DECREF(value_ptr[7]); Py_DECREF(value_ptr[8]); break; } case 126: { /* from SPARQL.bgen, line 1370 * FunctionCall: IRIref LEFT_PAREN ArgumentList RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], value_ptr[3]); if (self->verbose) { fprintf(stderr, "--FunctionCall("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 127: { /* from SPARQL.bgen, line 1379 * FunctionCall: IRIref NIL */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], PyList_New(0)); if (self->verbose) { fprintf(stderr, "--FunctionCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 128: { /* from SPARQL.bgen, line 1396 * ArgumentList: ConditionalOrExpression */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 129: { /* from SPARQL.bgen, line 1405 * ArgumentList: ConditionalOrExpression COMMA ArgumentList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 130: { /* from SPARQL.bgen, line 1421 * PrimaryExpression: LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--PrimaryExpression("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 139: { /* from SPARQLTurtleSuperSet.bgen.frag, line 8 * Filter: FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(Filter, "ParsedExpressionFilter", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 140: { /* from SPARQLTurtleSuperSet.bgen.frag, line 17 * Filter: FILTER BuiltInCall */ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 141: { /* from SPARQLTurtleSuperSet.bgen.frag, line 24 * Filter: FILTER FunctionCall */ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 142: { /* from SPARQLTurtleSuperSet.bgen.frag, line 40 * Triples: Triples DOT TriplesSameSubject */ PyList_Append(value_ptr[1], 
value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 144: { /* from SPARQLTurtleSuperSet.bgen.frag, line 54 * Triples: TriplesSameSubject */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 145: { /* from SPARQLTurtleSuperSet.bgen.frag, line 80 * TriplesSameSubject: Var PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 146: { /* from SPARQLTurtleSuperSet.bgen.frag, line 87 * TriplesSameSubject: GraphTerm PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 147: { /* from SPARQLTurtleSuperSet.bgen.frag, line 94 * TriplesSameSubject: LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList */ yyval = PyObject_CallMethod(Resource, "TwiceReferencedBlankNode", "OO", value_ptr[2],value_ptr[4]); if 
(self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 148: { /* from SPARQLTurtleSuperSet.bgen.frag, line 103 * TriplesSameSubject: Collection PropertyListNotEmpty */ PyObject_CallMethod(value_ptr[1], "setPropertyValueList", "O", value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 151: { /* from SPARQLTurtleSuperSet.bgen.frag, line 127 * PropertyList: */ yyval = PyList_New(0); if (self->verbose) { fprintf(stderr, "--PropertyList("); fprintf(stderr, ")\n"); } break; } case 152: { /* from SPARQLTurtleSuperSet.bgen.frag, line 135 * PropertyListNotEmpty: Verb ObjectList */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 153: { /* from SPARQLTurtleSuperSet.bgen.frag, line 143 * PropertyListNotEmpty: Verb ObjectList SEMICOLON PropertyList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2]),value_ptr[4]); if (self->verbose) { 
fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 154: { /* from SPARQLTurtleSuperSet.bgen.frag, line 159 * ObjectList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 155: { /* from SPARQLTurtleSuperSet.bgen.frag, line 167 * ObjectList: ObjectList COMMA GraphNode */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 161: { /* from SPARQLTurtleSuperSet.bgen.frag, line 210 * Verb: A */ yyval = PyObject_GetAttrString(RDF, "type"); if (self->verbose) { fprintf(stderr, "--Verb("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 163: { /* from SPARQLTurtleSuperSet.bgen.frag, line 228 * TriplesNode: LEFT_SQUARE PropertyList RIGHT_SQUARE */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Resource, "Resource", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 164: { /* from SPARQLTurtleSuperSet.bgen.frag, line 244 * Collection: LEFT_PAREN GraphNodeList RIGHT_PAREN */ yyval = PyObject_CallMethod(Resource, "ParsedCollection", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Collection("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 165: { /* from SPARQLTurtleSuperSet.bgen.frag, line 256 * GraphNodeList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 166: { /* from SPARQLTurtleSuperSet.bgen.frag, line 264 * GraphNodeList: GraphNodeList GraphNode */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 167: { /* from SPARQLTurtleSuperSet.bgen.frag, line 278 * Var: VARNAME */ PyObject *varName = PySequence_GetSlice(value_ptr[1], 1, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "Variable", "O", varName); Py_XDECREF(varName); if (self->verbose) { fprintf(stderr, "--Var("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 172: { /* from SPARQLTurtleSuperSet.bgen.frag, line 312 * GraphTerm: MINUS NumericLiteral */ PyObject *negNum = PyNumber_Negative(PyObject_CallMethod(value_ptr[2],"toPython",NULL)); yyval = PyObject_CallMethod(rdflib, "Literal", "O", negNum); Py_XDECREF(negNum); if (self->verbose) { fprintf(stderr, "--GraphTerm("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 176: { /* from SPARQLTurtleSuperSet.bgen.frag, line 340 * NumericLiteral: INTEGER */ PyObject *num = PyNumber_Int(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 177: { /* from SPARQLTurtleSuperSet.bgen.frag, line 349 * NumericLiteral: DECIMAL */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 178: { /* from SPARQLTurtleSuperSet.bgen.frag, line 358 * NumericLiteral: DOUBLE */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 179: { /* from SPARQLTurtleSuperSet.bgen.frag, line 371 * RDFLiteral: String */ yyval = PyObject_CallMethod(rdflib, "Literal", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 180: { /* from SPARQLTurtleSuperSet.bgen.frag, line 377 * RDFLiteral: String LANGTAG */ PyObject *lang = PySequence_GetSlice(value_ptr[2], 1, PyString_GET_SIZE(value_ptr[2])); yyval = PyObject_CallMethod(rdflib, "Literal", "OO", value_ptr[1], lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 181: { /* from SPARQLTurtleSuperSet.bgen.frag, line 386 * RDFLiteral: String DOUBLE_HAT IRIref */ yyval = PyObject_CallMethod(Expression, "ParsedDatatypedLiteral", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 184: { /* from SPARQLTurtleSuperSet.bgen.frag, line 413 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 185: { /* from SPARQLTurtleSuperSet.bgen.frag, line 421 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 186: { /* from SPARQLTurtleSuperSet.bgen.frag, line 429 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 187: { /* from SPARQLTurtleSuperSet.bgen.frag, line 437 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 188: { /* from SPARQLTurtleSuperSet.bgen.frag, line 446 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 189: { /* from SPARQLTurtleSuperSet.bgen.frag, line 
453 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 190: { /* from SPARQLTurtleSuperSet.bgen.frag, line 460 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 191: { /* from SPARQLTurtleSuperSet.bgen.frag, line 467 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 192: { /* from SPARQLTurtleSuperSet.bgen.frag, line 480 * BlankNode: ANON */ yyval = PyObject_CallMethod(rdflib, "Variable", "O", PyObject_CallMethod(rdflib, "BNode","")); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 193: { /* from SPARQLTurtleSuperSet.bgen.frag, line 491 * BlankNode: BLANK_NODE_LABEL */ PyObject *lang = PySequence_GetSlice(value_ptr[1], 2, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "BNode", "O",lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, 
Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } } if (!yyval) { lexer_free(lexer); return NULL; } *++value_ptr = yyval; if (self->verbose) print_state_stack(state_stack, state_ptr); /* Now "shift" the result of the reduction. */ /* Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = derives[yyn] - YYNTBASE; yystate = goto_idx[yyn] + *state_ptr; if (yystate >= 0 && yystate <= YYLAST && yycheck[yystate] == *state_ptr) { yystate = yytable[yystate]; } else { yystate = default_goto[yyn]; } continue; } /* if (yyn == YYFLAG) */ /* Not known => get a lookahead token if don't already have one. */ /* yychar is either YYEMPTY, YYEOF or a valid token in external form */ if (yychar == YYEMPTY) { TRACE("Reading a token: "); yychar = parser_yylex(self, lexer, &yylval); } /* Convert token to internal form (in yychar1) for indexing tables with */ if (yychar <= 0) { if (yychar == YYERROR) { lexer_free(lexer); return NULL; } /* This means end-of-input. */ yychar1 = 0; TRACE("Now at end of input.\n"); } else { yychar1 = YYTRANSLATE(yychar); TRACE("Next token is %d (%s)\n", yychar, token_names[yychar1]); yyn += yychar1; } if (yyn < 0 || yyn > YYLAST || yycheck[yyn] != yychar1) { /* comes here after end of input */ yyn = default_action[yystate]; if (yyn == 0) { return report_error(yystate, NULL, lexer); } /* Do a reduction. yyn is the number of a rule to reduce with. 
*/ if (self->verbose) print_reduce(yyn); yylen = rhs_size[yyn]; state_ptr -= yylen; value_ptr -= yylen; if (yylen > 0) yyval = value_ptr[1]; /* Action routines */ switch (yyn) { case 1: { /* from SPARQL.bgen, line 41 * Query: Prolog QueryTypes */ yyval = PyObject_CallMethod(Query, "Query", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Query("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 2: { /* from SPARQL.bgen, line 52 * QueryTypes: SelectQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 5: { /* from SPARQL.bgen, line 65 * QueryTypes: AskQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 6: { /* from SPARQL.bgen, line 78 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 7: { /* from SPARQL.bgen, line 
88 * DescribeQuery: DESCRIBE VAR_REFERENCES SolutionModifier */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,Py_None,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 8: { /* from SPARQL.bgen, line 98 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],Py_None,value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 9: { /* from SPARQL.bgen, line 108 * DescribeQuery: DESCRIBE VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); 
break; } case 10: { /* from SPARQL.bgen, line 123 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],value_ptr[5],value_ptr[6],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); Py_DECREF(value_ptr[7]); break; } case 11: { /* from SPARQL.bgen, line 135 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],Py_None,value_ptr[5],value_ptr[6]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 12: { /* from 
SPARQL.bgen, line 152 * Prolog: BaseDecl PrefixDeclList */ yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 13: { /* from SPARQL.bgen, line 159 * Prolog: BaseDecl */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], Py_None); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 14: { /* from SPARQL.bgen, line 166 * Prolog: PrefixDeclList */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", Py_None, value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 15: { /* from SPARQL.bgen, line 173 * Prolog: */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--Prolog("); fprintf(stderr, ")\n"); } break; } case 16: { /* from SPARQL.bgen, line 192 * PrefixDeclList: PrefixDecl */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 17: { /* from SPARQL.bgen, line 200 * PrefixDeclList: PrefixDeclList PrefixDecl */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 18: { 
/* from SPARQL.bgen, line 216 * PrefixDecl: PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 19: { /* from SPARQL.bgen, line 226 * PrefixDecl: PREFIX PNAME_NS LESS_THAN GREATER_THAN */ PyObject *t = PyObject_GetAttrString(Bindings, "EMPTY_STRING"); yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 20: { /* from SPARQL.bgen, line 242 * BaseDecl: BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "BaseDeclaration", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BaseDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 21: { /* from SPARQL.bgen, line 256 * AskQuery: ASK WhereClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "AskQuery", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 22: { /* from SPARQL.bgen, line 264 * AskQuery: ASK DataSetClauseList WhereClause */ yyval = PyObject_CallMethod(Query, "AskQuery", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 23: { /* from SPARQL.bgen, line 277 * SelectQuery: SELECT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 24: { /* from SPARQL.bgen, line 287 * SelectQuery: SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", 
value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 25: { /* from SPARQL.bgen, line 297 * SelectQuery: SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],Py_None,value_ptr[4],value_ptr[5],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 26: { /* from SPARQL.bgen, line 308 * SelectQuery: SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],value_ptr[4],value_ptr[5],value_ptr[6],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 28: { /* from SPARQL.bgen, line 327 * VAR_REFERENCES: ASTERISK */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--VAR_REFERENCES("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 29: { /* from SPARQL.bgen, line 337 * VariableReferenceList: Var */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 30: { /* from SPARQL.bgen, line 345 * VariableReferenceList: VariableReferenceList Var */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 31: { /* from SPARQL.bgen, line 361 * IRIref: LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(IRIRef, "IRIRef", "O",value_ptr[2]); if (self->verbose) { fprintf(stderr, "--IRIref("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 33: { /* from SPARQL.bgen, 
line 384 * PrefixedName: PNAME_NS */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 34: { /* from SPARQL.bgen, line 390 * PrefixedName: PNAME_LN */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 35: { /* from SPARQL.bgen, line 400 * DataSetClauseList: DataSetClause */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 36: { /* from SPARQL.bgen, line 408 * DataSetClauseList: DataSetClauseList DataSetClause */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 37: { /* from SPARQL.bgen, line 422 * DataSetClause: FROM IRIref */ yyval = PyObject_CallMethod(IRIRef, "RemoteGraph", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 38: { /* from SPARQL.bgen, line 429 * DataSetClause: FROM NAMED IRIref */ yyval = PyObject_CallMethod(IRIRef, "NamedGraph", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 39: { /* from SPARQL.bgen, line 442 * WhereClause: WHERE GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 40: { /* from SPARQL.bgen, line 449 * WhereClause: GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 41: { /* from SPARQL.bgen, line 461 * SolutionModifier: */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", ""); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); fprintf(stderr, ")\n"); } break; } case 42: { /* from SPARQL.bgen, line 466 * SolutionModifier: OrderClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 43: { /* from SPARQL.bgen, line 472 * SolutionModifier: OrderClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OO",value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 44: { /* from SPARQL.bgen, line 479 * SolutionModifier: OrderClause LimitClause OffsetClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 45: { /* from SPARQL.bgen, line 487 * SolutionModifier: OrderClause OffsetClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 46: { /* from SPARQL.bgen, line 495 * SolutionModifier: LimitClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 47: { /* from SPARQL.bgen, line 503 * SolutionModifier: OrderClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],Py_None,value_ptr[2]); if (self->verbose) { 
fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 48: { /* from SPARQL.bgen, line 511 * SolutionModifier: OffsetClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,Py_None,value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 49: { /* from SPARQL.bgen, line 519 * SolutionModifier: LimitClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],Py_None); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 50: { /* from SPARQL.bgen, line 532 * OrderClause: ORDER BY OrderConditionList */ yyval = value_ptr[3]; Py_INCREF(value_ptr[3]); if (self->verbose) { fprintf(stderr, "--OrderClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 51: { /* from SPARQL.bgen, line 545 * OrderConditionList: OrderCondition */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 52: { /* from SPARQL.bgen, line 553 * OrderConditionList: OrderConditionList OrderCondition */ 
PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 53: { /* from SPARQL.bgen, line 569 * OrderCondition: ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "ASCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 54: { /* from SPARQL.bgen, line 580 * OrderCondition: DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "DESCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 57: { /* from SPARQL.bgen, line 597 * OrderCondition: LEFT_PAREN 
ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "UNSPECIFIED_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[2],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 59: { /* from SPARQL.bgen, line 615 * LimitClause: LIMIT NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--LimitClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 60: { /* from SPARQL.bgen, line 628 * OffsetClause: OFFSET NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--OffsetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 61: { /* from SPARQL.bgen, line 645 * GroupGraphPattern: LEFT_CURLY RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 62: { /* from SPARQL.bgen, line 653 * GroupGraphPattern: LEFT_CURLY Triples GraphPatternList RIGHT_CURLY */ yyval = 
PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 63: { /* from SPARQL.bgen, line 662 * GroupGraphPattern: LEFT_CURLY Triples RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 64: { /* from SPARQL.bgen, line 671 * GroupGraphPattern: LEFT_CURLY GraphPatternList RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 65: { /* from SPARQL.bgen, line 684 * GraphPatternList: GraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphPatternList("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 66: { /* from SPARQL.bgen, line 692 * GraphPatternList: GraphPatternList GraphPattern */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 67: { /* from SPARQL.bgen, line 739 * GraphPattern: Filter Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 68: { /* from SPARQL.bgen, line 747 * GraphPattern: Filter DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 69: { /* from SPARQL.bgen, line 756 * GraphPattern: Filter DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 70: { /* from SPARQL.bgen, line 764 * GraphPattern: Filter */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 71: { /* from SPARQL.bgen, line 771 * GraphPattern: GraphPatternNotTriples Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 72: { /* from SPARQL.bgen, line 779 * GraphPattern: GraphPatternNotTriples DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 73: { /* from SPARQL.bgen, line 788 * GraphPattern: GraphPatternNotTriples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 74: { /* from SPARQL.bgen, line 795 * GraphPattern: GraphPatternNotTriples DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", 
"OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 75: { /* from SPARQL.bgen, line 847 * GraphPatternNotTriples: OPTIONAL GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedOptionalGraphPattern", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 76: { /* from SPARQL.bgen, line 854 * GraphPatternNotTriples: GroupGraphPattern */ yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 77: { /* from SPARQL.bgen, line 860 * GraphPatternNotTriples: GroupGraphPattern AlternativeGroupGraphPatterns */ yyval = PyObject_CallMethod(GraphPattern, "ParsedAlternativeGraphPattern", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 78: { /* from SPARQL.bgen, line 867 * GraphPatternNotTriples: GRAPH Var GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 79: { /* from SPARQL.bgen, line 875 * GraphPatternNotTriples: GRAPH BlankNode GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 80: { /* from SPARQL.bgen, line 883 * GraphPatternNotTriples: GRAPH IRIref GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 81: { /* from SPARQL.bgen, line 898 * AlternativeGroupGraphPatterns: UNION GroupGraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 82: { /* from SPARQL.bgen, line 907 * AlternativeGroupGraphPatterns: AlternativeGroupGraphPatterns UNION 
GroupGraphPattern */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 84: { /* from SPARQL.bgen, line 934 * ConditionalOrExpression: ConditionalAndExpression ConditionalAndExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedConditionalAndExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalOrExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 85: { /* from SPARQL.bgen, line 946 * ConditionalAndExpressionList: DOUBLE_PIPE ConditionalAndExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 86: { /* from SPARQL.bgen, line 955 * ConditionalAndExpressionList: ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", 
"); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 87: { /* from SPARQL.bgen, line 970 * ConditionalAndExpression: RelationalExpression ValueLogicalList */ yyval = PyObject_CallMethod(Expression, "ParsedRelationalExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 89: { /* from SPARQL.bgen, line 987 * ValueLogicalList: DOUBLE_AMPERSAND RelationalExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 90: { /* from SPARQL.bgen, line 996 * ValueLogicalList: ValueLogicalList DOUBLE_AMPERSAND RelationalExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 92: { /* from SPARQL.bgen, line 1024 * RelationalExpression: AdditiveExpression EQUALITY_OP AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "EqualityOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { 
fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 93: { /* from SPARQL.bgen, line 1032 * RelationalExpression: AdditiveExpression NOT_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "NotEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 94: { /* from SPARQL.bgen, line 1040 * RelationalExpression: AdditiveExpression LESS_THAN AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "LessThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 95: { /* from SPARQL.bgen, line 1048 * RelationalExpression: AdditiveExpression GREATER_THAN AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, 
Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 96: { /* from SPARQL.bgen, line 1056 * RelationalExpression: AdditiveExpression LESS_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "LessThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 97: { /* from SPARQL.bgen, line 1064 * RelationalExpression: AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 99: { /* from SPARQL.bgen, line 1083 * AdditiveExpression: MultiplicativeExpression MultiplicativeExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedAdditiveExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); /*yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2]);*/ if (self->verbose) { fprintf(stderr, "--AdditiveExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 100: { /* from SPARQL.bgen, 
line 1095 * MultiplicativeExpressionList: PLUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 101: { /* from SPARQL.bgen, line 1105 * MultiplicativeExpressionList: MINUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 102: { /* from SPARQL.bgen, line 1115 * MultiplicativeExpressionList: MultiplicativeExpressionList MINUS MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 103: { /* from SPARQL.bgen, line 1125 * MultiplicativeExpressionList: MultiplicativeExpressionList PLUS 
MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 105: { /* from SPARQL.bgen, line 1146 * MultiplicativeExpression: UnaryExpression UnaryExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedMultiplicativeExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 106: { /* from SPARQL.bgen, line 1157 * UnaryExpressionList: ASTERISK UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 107: { /* from SPARQL.bgen, line 1166 * UnaryExpressionList: FORWARDSLASH UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 108: { /* from SPARQL.bgen, line 1175 * UnaryExpressionList: UnaryExpressionList ASTERISK UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 109: { /* from SPARQL.bgen, line 1185 * UnaryExpressionList: UnaryExpressionList FORWARDSLASH UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 110: { /* from SPARQL.bgen, line 1200 * UnaryExpression: BANG PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "LogicalNegation", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 111: { /* from SPARQL.bgen, line 1207 * UnaryExpression: PLUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericPositive", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 112: { /* from SPARQL.bgen, line 1214 * UnaryExpression: MINUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericNegative", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 114: { /* from SPARQL.bgen, line 1241 * BuiltInCall: STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "STR"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 115: { /* from SPARQL.bgen, line 1251 * BuiltInCall: LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANG"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); 
Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 116: { /* from SPARQL.bgen, line 1261 * BuiltInCall: LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANGMATCHES"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OOO", funcName,value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 117: { /* from SPARQL.bgen, line 1273 * BuiltInCall: DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "DATATYPE"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 118: { /* from SPARQL.bgen, line 1283 * BuiltInCall: BOUND LEFT_PAREN Var RIGHT_PAREN */ PyObject *funcName = 
PyObject_GetAttrString(FunctionLibrary, "BOUND"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 119: { /* from SPARQL.bgen, line 1293 * BuiltInCall: isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isIRI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 120: { /* from SPARQL.bgen, line 1303 * BuiltInCall: isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isURI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, 
")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 121: { /* from SPARQL.bgen, line 1313 * BuiltInCall: isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isBLANK"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 122: { /* from SPARQL.bgen, line 1323 * BuiltInCall: isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isLITERAL"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 124: { /* from SPARQL.bgen, line 1341 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OO", value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); 
fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 125: { /* from SPARQL.bgen, line 1352 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OOO", value_ptr[3],value_ptr[5],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[8], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); Py_DECREF(value_ptr[7]); Py_DECREF(value_ptr[8]); break; } case 126: { /* from SPARQL.bgen, line 1370 * FunctionCall: IRIref LEFT_PAREN ArgumentList RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], value_ptr[3]); if (self->verbose) { fprintf(stderr, "--FunctionCall("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 127: { /* from SPARQL.bgen, line 1379 * FunctionCall: IRIref NIL */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], PyList_New(0)); if (self->verbose) { fprintf(stderr, "--FunctionCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 128: { /* from SPARQL.bgen, line 1396 * ArgumentList: ConditionalOrExpression */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 129: { /* from SPARQL.bgen, line 1405 * ArgumentList: ConditionalOrExpression COMMA ArgumentList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 130: { /* from SPARQL.bgen, line 1421 * PrimaryExpression: LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--PrimaryExpression("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 139: { /* from SPARQLTurtleSuperSet.bgen.frag, line 8 * Filter: FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(Filter, "ParsedExpressionFilter", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 140: { /* from SPARQLTurtleSuperSet.bgen.frag, line 17 * Filter: FILTER BuiltInCall */ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 141: { /* from SPARQLTurtleSuperSet.bgen.frag, line 24 * Filter: FILTER FunctionCall */ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 142: { /* from SPARQLTurtleSuperSet.bgen.frag, line 40 * Triples: Triples DOT TriplesSameSubject */ PyList_Append(value_ptr[1], 
value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 144: { /* from SPARQLTurtleSuperSet.bgen.frag, line 54 * Triples: TriplesSameSubject */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 145: { /* from SPARQLTurtleSuperSet.bgen.frag, line 80 * TriplesSameSubject: Var PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 146: { /* from SPARQLTurtleSuperSet.bgen.frag, line 87 * TriplesSameSubject: GraphTerm PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 147: { /* from SPARQLTurtleSuperSet.bgen.frag, line 94 * TriplesSameSubject: LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList */ yyval = PyObject_CallMethod(Resource, "TwiceReferencedBlankNode", "OO", value_ptr[2],value_ptr[4]); if 
(self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 148: { /* from SPARQLTurtleSuperSet.bgen.frag, line 103 * TriplesSameSubject: Collection PropertyListNotEmpty */ PyObject_CallMethod(value_ptr[1], "setPropertyValueList", "O", value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 151: { /* from SPARQLTurtleSuperSet.bgen.frag, line 127 * PropertyList: */ yyval = PyList_New(0); if (self->verbose) { fprintf(stderr, "--PropertyList("); fprintf(stderr, ")\n"); } break; } case 152: { /* from SPARQLTurtleSuperSet.bgen.frag, line 135 * PropertyListNotEmpty: Verb ObjectList */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 153: { /* from SPARQLTurtleSuperSet.bgen.frag, line 143 * PropertyListNotEmpty: Verb ObjectList SEMICOLON PropertyList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2]),value_ptr[4]); if (self->verbose) { 
fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 154: { /* from SPARQLTurtleSuperSet.bgen.frag, line 159 * ObjectList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 155: { /* from SPARQLTurtleSuperSet.bgen.frag, line 167 * ObjectList: ObjectList COMMA GraphNode */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 161: { /* from SPARQLTurtleSuperSet.bgen.frag, line 210 * Verb: A */ yyval = PyObject_GetAttrString(RDF, "type"); if (self->verbose) { fprintf(stderr, "--Verb("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 163: { /* from SPARQLTurtleSuperSet.bgen.frag, line 228 * TriplesNode: LEFT_SQUARE PropertyList RIGHT_SQUARE */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Resource, "Resource", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 164: { /* from SPARQLTurtleSuperSet.bgen.frag, line 244 * Collection: LEFT_PAREN GraphNodeList RIGHT_PAREN */ yyval = PyObject_CallMethod(Resource, "ParsedCollection", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Collection("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 165: { /* from SPARQLTurtleSuperSet.bgen.frag, line 256 * GraphNodeList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 166: { /* from SPARQLTurtleSuperSet.bgen.frag, line 264 * GraphNodeList: GraphNodeList GraphNode */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 167: { /* from SPARQLTurtleSuperSet.bgen.frag, line 278 * Var: VARNAME */ PyObject *varName = PySequence_GetSlice(value_ptr[1], 1, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "Variable", "O", varName); Py_XDECREF(varName); if (self->verbose) { fprintf(stderr, "--Var("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 172: { /* from SPARQLTurtleSuperSet.bgen.frag, line 312 * GraphTerm: MINUS NumericLiteral */ PyObject *negNum = PyNumber_Negative(PyObject_CallMethod(value_ptr[2],"toPython",NULL)); yyval = PyObject_CallMethod(rdflib, "Literal", "O", negNum); Py_XDECREF(negNum); if (self->verbose) { fprintf(stderr, "--GraphTerm("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 176: { /* from SPARQLTurtleSuperSet.bgen.frag, line 340 * NumericLiteral: INTEGER */ PyObject *num = PyNumber_Int(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 177: { /* from SPARQLTurtleSuperSet.bgen.frag, line 349 * NumericLiteral: DECIMAL */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 178: { /* from SPARQLTurtleSuperSet.bgen.frag, line 358 * NumericLiteral: DOUBLE */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 179: { /* from SPARQLTurtleSuperSet.bgen.frag, line 371 * RDFLiteral: String */ yyval = PyObject_CallMethod(rdflib, "Literal", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 180: { /* from SPARQLTurtleSuperSet.bgen.frag, line 377 * RDFLiteral: String LANGTAG */ PyObject *lang = PySequence_GetSlice(value_ptr[2], 1, PyString_GET_SIZE(value_ptr[2])); yyval = PyObject_CallMethod(rdflib, "Literal", "OO", value_ptr[1], lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 181: { /* from SPARQLTurtleSuperSet.bgen.frag, line 386 * RDFLiteral: String DOUBLE_HAT IRIref */ yyval = PyObject_CallMethod(Expression, "ParsedDatatypedLiteral", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 184: { /* from SPARQLTurtleSuperSet.bgen.frag, line 413 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 185: { /* from SPARQLTurtleSuperSet.bgen.frag, line 421 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 186: { /* from SPARQLTurtleSuperSet.bgen.frag, line 429 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 187: { /* from SPARQLTurtleSuperSet.bgen.frag, line 437 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 188: { /* from SPARQLTurtleSuperSet.bgen.frag, line 446 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 189: { /* from SPARQLTurtleSuperSet.bgen.frag, line 
453 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 190: { /* from SPARQLTurtleSuperSet.bgen.frag, line 460 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 191: { /* from SPARQLTurtleSuperSet.bgen.frag, line 467 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 192: { /* from SPARQLTurtleSuperSet.bgen.frag, line 480 * BlankNode: ANON */ yyval = PyObject_CallMethod(rdflib, "Variable", "O", PyObject_CallMethod(rdflib, "BNode","")); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 193: { /* from SPARQLTurtleSuperSet.bgen.frag, line 491 * BlankNode: BLANK_NODE_LABEL */ PyObject *lang = PySequence_GetSlice(value_ptr[1], 2, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "BNode", "O",lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, 
Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } } if (!yyval) { lexer_free(lexer); return NULL; } *++value_ptr = yyval; if (self->verbose) print_state_stack(state_stack, state_ptr); /* Now "shift" the result of the reduction. */ /* Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = derives[yyn] - YYNTBASE; yystate = goto_idx[yyn] + *state_ptr; if (yystate >= 0 && yystate <= YYLAST && yycheck[yystate] == *state_ptr) { yystate = yytable[yystate]; } else { yystate = default_goto[yyn]; } continue; } yyn = yytable[yyn]; /* yyn is what to do for this token type in this state. Negative => reduce, -yyn is rule number. Positive => shift, yyn is new state. New state is final state => don't bother to shift just return success. 0, or max negative number => error. */ if (yyn > YYFLAG && yyn < 0) { yyn = -yyn; /* Do a reduction. yyn is the number of a rule to reduce with. */ if (self->verbose) print_reduce(yyn); yylen = rhs_size[yyn]; state_ptr -= yylen; value_ptr -= yylen; if (yylen > 0) yyval = value_ptr[1]; /* Action routines */ switch (yyn) { case 1: { /* from SPARQL.bgen, line 41 * Query: Prolog QueryTypes */ yyval = PyObject_CallMethod(Query, "Query", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Query("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 2: { /* from SPARQL.bgen, line 52 * QueryTypes: SelectQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 5: { /* from SPARQL.bgen, line 65 * QueryTypes: AskQuery */ yyval = value_ptr[1]; Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--QueryTypes("); 
PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 6: { /* from SPARQL.bgen, line 78 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 7: { /* from SPARQL.bgen, line 88 * DescribeQuery: DESCRIBE VAR_REFERENCES SolutionModifier */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,Py_None,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 8: { /* from SPARQL.bgen, line 98 * DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],value_ptr[3],Py_None,value_ptr[5]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 9: { /* from SPARQL.bgen, line 108 * DescribeQuery: DESCRIBE VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--DescribeQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 10: { /* from SPARQL.bgen, line 123 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],value_ptr[5],value_ptr[6],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); 
Py_DECREF(value_ptr[7]); break; } case 11: { /* from SPARQL.bgen, line 135 * ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", value_ptr[3],Py_None,value_ptr[5],value_ptr[6]); if (self->verbose) { fprintf(stderr, "--ConstructQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 12: { /* from SPARQL.bgen, line 152 * Prolog: BaseDecl PrefixDeclList */ yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 13: { /* from SPARQL.bgen, line 159 * Prolog: BaseDecl */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", value_ptr[1], Py_None); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 14: { /* from SPARQL.bgen, line 166 * Prolog: PrefixDeclList */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "Prolog", "OO", Py_None, value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Prolog("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); 
} Py_DECREF(value_ptr[1]); break; } case 15: { /* from SPARQL.bgen, line 173 * Prolog: */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--Prolog("); fprintf(stderr, ")\n"); } break; } case 16: { /* from SPARQL.bgen, line 192 * PrefixDeclList: PrefixDecl */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 17: { /* from SPARQL.bgen, line 200 * PrefixDeclList: PrefixDeclList PrefixDecl */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--PrefixDeclList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 18: { /* from SPARQL.bgen, line 216 * PrefixDecl: PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 19: { /* from SPARQL.bgen, line 226 * PrefixDecl: PREFIX PNAME_NS LESS_THAN GREATER_THAN */ PyObject *t = PyObject_GetAttrString(Bindings, "EMPTY_STRING"); yyval = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", value_ptr[2],t); 
Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--PrefixDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 20: { /* from SPARQL.bgen, line 242 * BaseDecl: BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(Bindings, "BaseDeclaration", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BaseDecl("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 21: { /* from SPARQL.bgen, line 256 * AskQuery: ASK WhereClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "AskQuery", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 22: { /* from SPARQL.bgen, line 264 * AskQuery: ASK DataSetClauseList WhereClause */ yyval = PyObject_CallMethod(Query, "AskQuery", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--AskQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, 
")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 23: { /* from SPARQL.bgen, line 277 * SelectQuery: SELECT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", value_ptr[2],Py_None,value_ptr[3],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 24: { /* from SPARQL.bgen, line 287 * SelectQuery: SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOO", value_ptr[2],value_ptr[3],value_ptr[4],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 25: { /* from SPARQL.bgen, line 297 * SelectQuery: SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],Py_None,value_ptr[4],value_ptr[5],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); break; } case 26: { /* from SPARQL.bgen, line 308 * SelectQuery: SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier */ yyval = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", value_ptr[3],value_ptr[4],value_ptr[5],value_ptr[6],1); if (self->verbose) { fprintf(stderr, "--SelectQuery("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 28: { /* from SPARQL.bgen, line 327 * VAR_REFERENCES: ASTERISK */ yyval = Py_None; if (self->verbose) { fprintf(stderr, "--VAR_REFERENCES("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 29: { /* from SPARQL.bgen, line 337 * VariableReferenceList: Var */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 30: { /* from SPARQL.bgen, line 345 * 
VariableReferenceList: VariableReferenceList Var */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--VariableReferenceList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 31: { /* from SPARQL.bgen, line 361 * IRIref: LESS_THAN Q_IRI_CONTENT GREATER_THAN */ yyval = PyObject_CallMethod(IRIRef, "IRIRef", "O",value_ptr[2]); if (self->verbose) { fprintf(stderr, "--IRIref("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 33: { /* from SPARQL.bgen, line 384 * PrefixedName: PNAME_NS */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 34: { /* from SPARQL.bgen, line 390 * PrefixedName: PNAME_LN */ yyval = PyObject_CallMethod(QName, "QName", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--PrefixedName("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 35: { /* from SPARQL.bgen, line 400 * DataSetClauseList: DataSetClause */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 36: { /* from SPARQL.bgen, line 408 * DataSetClauseList: DataSetClauseList DataSetClause */ 
PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--DataSetClauseList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 37: { /* from SPARQL.bgen, line 422 * DataSetClause: FROM IRIref */ yyval = PyObject_CallMethod(IRIRef, "RemoteGraph", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 38: { /* from SPARQL.bgen, line 429 * DataSetClause: FROM NAMED IRIref */ yyval = PyObject_CallMethod(IRIRef, "NamedGraph", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--DataSetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 39: { /* from SPARQL.bgen, line 442 * WhereClause: WHERE GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 40: { /* from SPARQL.bgen, line 449 * WhereClause: GroupGraphPattern */ yyval = PyObject_CallMethod(Query, "WhereClause", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--WhereClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 41: { /* from SPARQL.bgen, line 461 * SolutionModifier: */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", ""); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); fprintf(stderr, ")\n"); } break; } case 42: { /* from SPARQL.bgen, line 466 * SolutionModifier: OrderClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "O",value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 43: { /* from SPARQL.bgen, line 472 * SolutionModifier: OrderClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OO",value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 44: { /* from SPARQL.bgen, line 479 * SolutionModifier: OrderClause LimitClause OffsetClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 45: { /* from SPARQL.bgen, line 487 * SolutionModifier: OrderClause OffsetClause LimitClause */ yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 46: { /* from SPARQL.bgen, line 495 * SolutionModifier: LimitClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 47: { /* from SPARQL.bgen, line 503 * SolutionModifier: OrderClause OffsetClause */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",value_ptr[1],Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 48: { /* from SPARQL.bgen, line 511 * SolutionModifier: OffsetClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,Py_None,value_ptr[1]); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 49: { /* from SPARQL.bgen, line 519 * SolutionModifier: LimitClause */ Py_INCREF(Py_None); Py_INCREF(Py_None); yyval = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,value_ptr[1],Py_None); if (self->verbose) { fprintf(stderr, "--SolutionModifier("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; 
} case 50: { /* from SPARQL.bgen, line 532 * OrderClause: ORDER BY OrderConditionList */ yyval = value_ptr[3]; Py_INCREF(value_ptr[3]); if (self->verbose) { fprintf(stderr, "--OrderClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 51: { /* from SPARQL.bgen, line 545 * OrderConditionList: OrderCondition */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 52: { /* from SPARQL.bgen, line 553 * OrderConditionList: OrderConditionList OrderCondition */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--OrderConditionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 53: { /* from SPARQL.bgen, line 569 * OrderCondition: ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "ASCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 54: { /* from SPARQL.bgen, line 580 * OrderCondition: DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "DESCENDING_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[3],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 57: { /* from SPARQL.bgen, line 597 * OrderCondition: LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *t = PyObject_GetAttrString(SolutionModifier, "UNSPECIFIED_ORDER"); yyval = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",value_ptr[2],t); Py_XDECREF(t); if (self->verbose) { fprintf(stderr, "--OrderCondition("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 59: { /* from SPARQL.bgen, line 615 * LimitClause: LIMIT NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--LimitClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 60: { /* from SPARQL.bgen, line 628 * 
OffsetClause: OFFSET NumericLiteral */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--OffsetClause("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 61: { /* from SPARQL.bgen, line 645 * GroupGraphPattern: LEFT_CURLY RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 62: { /* from SPARQL.bgen, line 653 * GroupGraphPattern: LEFT_CURLY Triples GraphPatternList RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 63: { /* from SPARQL.bgen, line 662 * GroupGraphPattern: LEFT_CURLY Triples RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",value_ptr[2],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 64: { /* from SPARQL.bgen, line 671 * GroupGraphPattern: LEFT_CURLY GraphPatternList RIGHT_CURLY */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GroupGraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 65: { /* from SPARQL.bgen, line 684 * GraphPatternList: GraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphPatternList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 66: { /* from SPARQL.bgen, line 692 * GraphPatternList: GraphPatternList GraphPattern */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 67: { /* from SPARQL.bgen, line 739 * GraphPattern: Filter Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 68: { /* from SPARQL.bgen, 
line 747 * GraphPattern: Filter DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 69: { /* from SPARQL.bgen, line 756 * GraphPattern: Filter DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 70: { /* from SPARQL.bgen, line 764 * GraphPattern: Filter */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,value_ptr[1],Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 71: { /* from SPARQL.bgen, line 771 * GraphPattern: GraphPatternNotTriples Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,value_ptr[2]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 72: { /* from SPARQL.bgen, line 779 * GraphPattern: GraphPatternNotTriples DOT Triples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", 
"OOO",value_ptr[1],Py_None,value_ptr[3]); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 73: { /* from SPARQL.bgen, line 788 * GraphPattern: GraphPatternNotTriples */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 74: { /* from SPARQL.bgen, line 795 * GraphPattern: GraphPatternNotTriples DOT */ yyval = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",value_ptr[1],Py_None,Py_None); Py_INCREF(Py_None); if (self->verbose) { fprintf(stderr, "--GraphPattern("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 75: { /* from SPARQL.bgen, line 847 * GraphPatternNotTriples: OPTIONAL GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedOptionalGraphPattern", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 76: { /* from SPARQL.bgen, line 854 * GraphPatternNotTriples: GroupGraphPattern */ yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } 
Py_DECREF(value_ptr[1]); break; } case 77: { /* from SPARQL.bgen, line 860 * GraphPatternNotTriples: GroupGraphPattern AlternativeGroupGraphPatterns */ yyval = PyObject_CallMethod(GraphPattern, "ParsedAlternativeGraphPattern", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 78: { /* from SPARQL.bgen, line 867 * GraphPatternNotTriples: GRAPH Var GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 79: { /* from SPARQL.bgen, line 875 * GraphPatternNotTriples: GRAPH BlankNode GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 80: { /* from SPARQL.bgen, line 883 * GraphPatternNotTriples: GRAPH IRIref GroupGraphPattern */ yyval = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", value_ptr[2],value_ptr[3]); if (self->verbose) { fprintf(stderr, 
"--GraphPatternNotTriples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 81: { /* from SPARQL.bgen, line 898 * AlternativeGroupGraphPatterns: UNION GroupGraphPattern */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 82: { /* from SPARQL.bgen, line 907 * AlternativeGroupGraphPatterns: AlternativeGroupGraphPatterns UNION GroupGraphPattern */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--AlternativeGroupGraphPatterns("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 84: { /* from SPARQL.bgen, line 934 * ConditionalOrExpression: ConditionalAndExpression ConditionalAndExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedConditionalAndExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalOrExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 85: { 
/* from SPARQL.bgen, line 946 * ConditionalAndExpressionList: DOUBLE_PIPE ConditionalAndExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 86: { /* from SPARQL.bgen, line 955 * ConditionalAndExpressionList: ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ConditionalAndExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 87: { /* from SPARQL.bgen, line 970 * ConditionalAndExpression: RelationalExpression ValueLogicalList */ yyval = PyObject_CallMethod(Expression, "ParsedRelationalExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--ConditionalAndExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 89: { /* from SPARQL.bgen, line 987 * ValueLogicalList: DOUBLE_AMPERSAND RelationalExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], 
stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 90: { /* from SPARQL.bgen, line 996 * ValueLogicalList: ValueLogicalList DOUBLE_AMPERSAND RelationalExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ValueLogicalList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 92: { /* from SPARQL.bgen, line 1024 * RelationalExpression: AdditiveExpression EQUALITY_OP AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "EqualityOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 93: { /* from SPARQL.bgen, line 1032 * RelationalExpression: AdditiveExpression NOT_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "NotEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 94: { /* from SPARQL.bgen, line 1040 * RelationalExpression: AdditiveExpression LESS_THAN AdditiveExpression */ yyval = 
PyObject_CallMethod(Operators, "LessThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 95: { /* from SPARQL.bgen, line 1048 * RelationalExpression: AdditiveExpression GREATER_THAN AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 96: { /* from SPARQL.bgen, line 1056 * RelationalExpression: AdditiveExpression LESS_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "LessThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 97: { /* from SPARQL.bgen, line 1064 * RelationalExpression: AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression */ yyval = PyObject_CallMethod(Operators, "GreaterThanOrEqualOperator", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RelationalExpression("); PyObject_Print(value_ptr[1], stderr, 
Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 99: { /* from SPARQL.bgen, line 1083 * AdditiveExpression: MultiplicativeExpression MultiplicativeExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedAdditiveExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); /*yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2]);*/ if (self->verbose) { fprintf(stderr, "--AdditiveExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 100: { /* from SPARQL.bgen, line 1095 * MultiplicativeExpressionList: PLUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 101: { /* from SPARQL.bgen, line 1105 * MultiplicativeExpressionList: MINUS MultiplicativeExpression */ yyval = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[1], value_ptr[2]); PyList_SET_ITEM(yyval, 0, mList); Py_INCREF(mList); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], 
stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 102: { /* from SPARQL.bgen, line 1115 * MultiplicativeExpressionList: MultiplicativeExpressionList MINUS MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 103: { /* from SPARQL.bgen, line 1125 * MultiplicativeExpressionList: MultiplicativeExpressionList PLUS MultiplicativeExpression */ PyList_Append(value_ptr[1], PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",value_ptr[2], value_ptr[3])); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--MultiplicativeExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 105: { /* from SPARQL.bgen, line 1146 * MultiplicativeExpression: UnaryExpression UnaryExpressionList */ yyval = PyObject_CallMethod(Expression, "ParsedMultiplicativeExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--MultiplicativeExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 106: { /* from SPARQL.bgen, line 1157 * UnaryExpressionList: ASTERISK UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 107: { /* from SPARQL.bgen, line 1166 * UnaryExpressionList: FORWARDSLASH UnaryExpression */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[2]); Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 108: { /* from SPARQL.bgen, line 1175 * UnaryExpressionList: UnaryExpressionList ASTERISK UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 109: { /* from SPARQL.bgen, line 1185 * UnaryExpressionList: UnaryExpressionList FORWARDSLASH UnaryExpression */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--UnaryExpressionList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 110: { /* from SPARQL.bgen, line 1200 * UnaryExpression: BANG PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "LogicalNegation", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 111: { /* from SPARQL.bgen, line 1207 * UnaryExpression: PLUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericPositive", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 112: { /* from SPARQL.bgen, line 1214 * UnaryExpression: MINUS PrimaryExpression */ yyval = PyObject_CallMethod(Operators, "NumericNegative", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--UnaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 114: { /* from SPARQL.bgen, line 1241 * BuiltInCall: STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "STR"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 115: { /* from SPARQL.bgen, line 1251 * BuiltInCall: LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANG"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 116: { /* from SPARQL.bgen, line 1261 * BuiltInCall: LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANGMATCHES"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OOO", funcName,value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); 
Py_DECREF(value_ptr[6]); break; } case 117: { /* from SPARQL.bgen, line 1273 * BuiltInCall: DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "DATATYPE"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 118: { /* from SPARQL.bgen, line 1283 * BuiltInCall: BOUND LEFT_PAREN Var RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "BOUND"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 119: { /* from SPARQL.bgen, line 1293 * BuiltInCall: isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isIRI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); 
fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 120: { /* from SPARQL.bgen, line 1303 * BuiltInCall: isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isURI"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 121: { /* from SPARQL.bgen, line 1313 * BuiltInCall: isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isBLANK"); yyval = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 122: { /* from SPARQL.bgen, line 1323 * BuiltInCall: isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isLITERAL"); yyval = 
PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,value_ptr[3]); if (self->verbose) { fprintf(stderr, "--BuiltInCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 124: { /* from SPARQL.bgen, line 1341 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OO", value_ptr[3],value_ptr[5]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); break; } case 125: { /* from SPARQL.bgen, line 1352 * RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OOO", value_ptr[3],value_ptr[5],value_ptr[7]); if (self->verbose) { fprintf(stderr, "--RegexExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); 
PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[5], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[6], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[7], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[8], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); Py_DECREF(value_ptr[5]); Py_DECREF(value_ptr[6]); Py_DECREF(value_ptr[7]); Py_DECREF(value_ptr[8]); break; } case 126: { /* from SPARQL.bgen, line 1370 * FunctionCall: IRIref LEFT_PAREN ArgumentList RIGHT_PAREN */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], value_ptr[3]); if (self->verbose) { fprintf(stderr, "--FunctionCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 127: { /* from SPARQL.bgen, line 1379 * FunctionCall: IRIref NIL */ yyval = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", value_ptr[1], PyList_New(0)); if (self->verbose) { fprintf(stderr, "--FunctionCall("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 128: { /* from SPARQL.bgen, line 1396 * ArgumentList: ConditionalOrExpression */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { 
fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 129: { /* from SPARQL.bgen, line 1405 * ArgumentList: ConditionalOrExpression COMMA ArgumentList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--ArgumentList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 130: { /* from SPARQL.bgen, line 1421 * PrimaryExpression: LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = value_ptr[2]; Py_INCREF(value_ptr[2]); if (self->verbose) { fprintf(stderr, "--PrimaryExpression("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 139: { /* from SPARQLTurtleSuperSet.bgen.frag, line 8 * Filter: FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN */ yyval = PyObject_CallMethod(Filter, "ParsedExpressionFilter", "O", value_ptr[3]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 140: { /* from SPARQLTurtleSuperSet.bgen.frag, line 17 * Filter: FILTER BuiltInCall 
*/ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 141: { /* from SPARQLTurtleSuperSet.bgen.frag, line 24 * Filter: FILTER FunctionCall */ yyval = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Filter("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 142: { /* from SPARQLTurtleSuperSet.bgen.frag, line 40 * Triples: Triples DOT TriplesSameSubject */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 144: { /* from SPARQLTurtleSuperSet.bgen.frag, line 54 * Triples: TriplesSameSubject */ yyval = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--Triples("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 145: { /* from SPARQLTurtleSuperSet.bgen.frag, line 80 * TriplesSameSubject: Var PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], 
stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 146: { /* from SPARQLTurtleSuperSet.bgen.frag, line 87 * TriplesSameSubject: GraphTerm PropertyListNotEmpty */ yyval = PyObject_CallMethod(Resource, "Resource", "OO", value_ptr[1],value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 147: { /* from SPARQLTurtleSuperSet.bgen.frag, line 94 * TriplesSameSubject: LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList */ yyval = PyObject_CallMethod(Resource, "TwiceReferencedBlankNode", "OO", value_ptr[2],value_ptr[4]); if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 148: { /* from SPARQLTurtleSuperSet.bgen.frag, line 103 * TriplesSameSubject: Collection PropertyListNotEmpty */ PyObject_CallMethod(value_ptr[1], "setPropertyValueList", "O", value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--TriplesSameSubject("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 151: { /* from SPARQLTurtleSuperSet.bgen.frag, line 127 * PropertyList: */ yyval = PyList_New(0); 
if (self->verbose) { fprintf(stderr, "--PropertyList("); fprintf(stderr, ")\n"); } break; } case 152: { /* from SPARQLTurtleSuperSet.bgen.frag, line 135 * PropertyListNotEmpty: Verb ObjectList */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2])); if (self->verbose) { fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 153: { /* from SPARQLTurtleSuperSet.bgen.frag, line 143 * PropertyListNotEmpty: Verb ObjectList SEMICOLON PropertyList */ yyval = PyObject_CallMethod(Util, "ListPrepend", "OO", PyObject_CallMethod(Triples, "PropertyValue", "OO", value_ptr[1],value_ptr[2]),value_ptr[4]); if (self->verbose) { fprintf(stderr, "--PropertyListNotEmpty("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[4], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); Py_DECREF(value_ptr[4]); break; } case 154: { /* from SPARQLTurtleSuperSet.bgen.frag, line 159 * ObjectList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 155: { /* from SPARQLTurtleSuperSet.bgen.frag, line 167 * ObjectList: ObjectList COMMA GraphNode */ PyList_Append(value_ptr[1], value_ptr[3]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--ObjectList("); PyObject_Print(value_ptr[1], stderr, 
Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 161: { /* from SPARQLTurtleSuperSet.bgen.frag, line 210 * Verb: A */ yyval = PyObject_GetAttrString(RDF, "type"); if (self->verbose) { fprintf(stderr, "--Verb("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 163: { /* from SPARQLTurtleSuperSet.bgen.frag, line 228 * TriplesNode: LEFT_SQUARE PropertyList RIGHT_SQUARE */ Py_INCREF(Py_None); yyval = PyObject_CallMethod(Resource, "Resource", "OO", Py_None,value_ptr[2]); if (self->verbose) { fprintf(stderr, "--TriplesNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 164: { /* from SPARQLTurtleSuperSet.bgen.frag, line 244 * Collection: LEFT_PAREN GraphNodeList RIGHT_PAREN */ yyval = PyObject_CallMethod(Resource, "ParsedCollection", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--Collection("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 165: { /* from SPARQLTurtleSuperSet.bgen.frag, line 256 * GraphNodeList: GraphNode */ yyval = PyList_New(1); PyList_SET_ITEM(yyval, 0, value_ptr[1]); Py_INCREF(value_ptr[1]); if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); 
fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 166: { /* from SPARQLTurtleSuperSet.bgen.frag, line 264 * GraphNodeList: GraphNodeList GraphNode */ PyList_Append(value_ptr[1], value_ptr[2]); Py_INCREF(value_ptr[1]); yyval = value_ptr[1]; if (self->verbose) { fprintf(stderr, "--GraphNodeList("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 167: { /* from SPARQLTurtleSuperSet.bgen.frag, line 278 * Var: VARNAME */ PyObject *varName = PySequence_GetSlice(value_ptr[1], 1, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "Variable", "O", varName); Py_XDECREF(varName); if (self->verbose) { fprintf(stderr, "--Var("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 172: { /* from SPARQLTurtleSuperSet.bgen.frag, line 312 * GraphTerm: MINUS NumericLiteral */ PyObject *negNum = PyNumber_Negative(PyObject_CallMethod(value_ptr[2],"toPython",NULL)); yyval = PyObject_CallMethod(rdflib, "Literal", "O", negNum); Py_XDECREF(negNum); if (self->verbose) { fprintf(stderr, "--GraphTerm("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 176: { /* from SPARQLTurtleSuperSet.bgen.frag, line 340 * NumericLiteral: INTEGER */ PyObject *num = PyNumber_Int(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 177: { /* from SPARQLTurtleSuperSet.bgen.frag, line 349 * NumericLiteral: DECIMAL */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = 
PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 178: { /* from SPARQLTurtleSuperSet.bgen.frag, line 358 * NumericLiteral: DOUBLE */ PyObject *num = PyNumber_Float(value_ptr[1]); yyval = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); if (self->verbose) { fprintf(stderr, "--NumericLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 179: { /* from SPARQLTurtleSuperSet.bgen.frag, line 371 * RDFLiteral: String */ yyval = PyObject_CallMethod(rdflib, "Literal", "O", value_ptr[1]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 180: { /* from SPARQLTurtleSuperSet.bgen.frag, line 377 * RDFLiteral: String LANGTAG */ PyObject *lang = PySequence_GetSlice(value_ptr[2], 1, PyString_GET_SIZE(value_ptr[2])); yyval = PyObject_CallMethod(rdflib, "Literal", "OO", value_ptr[1], lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 181: { /* from SPARQLTurtleSuperSet.bgen.frag, line 386 * RDFLiteral: String DOUBLE_HAT IRIref */ yyval = PyObject_CallMethod(Expression, "ParsedDatatypedLiteral", "OO", value_ptr[1],value_ptr[3]); if (self->verbose) { fprintf(stderr, "--RDFLiteral("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); 
Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 184: { /* from SPARQLTurtleSuperSet.bgen.frag, line 413 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 185: { /* from SPARQLTurtleSuperSet.bgen.frag, line 421 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 186: { /* from SPARQLTurtleSuperSet.bgen.frag, line 429 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 187: { /* from SPARQLTurtleSuperSet.bgen.frag, line 437 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 */ yyval = 
PyObject_CallMethod(Expression, "ParsedString", "O", value_ptr[2]); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[3], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); Py_DECREF(value_ptr[3]); break; } case 188: { /* from SPARQLTurtleSuperSet.bgen.frag, line 446 * String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 189: { /* from SPARQLTurtleSuperSet.bgen.frag, line 453 * String: STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 190: { /* from SPARQLTurtleSuperSet.bgen.frag, line 460 * String: STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 191: { /* from SPARQLTurtleSuperSet.bgen.frag, line 467 * String: STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 */ yyval = PyObject_CallMethod(Expression, "ParsedString", ""); if (self->verbose) { 
fprintf(stderr, "--String("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ", "); PyObject_Print(value_ptr[2], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); Py_DECREF(value_ptr[2]); break; } case 192: { /* from SPARQLTurtleSuperSet.bgen.frag, line 480 * BlankNode: ANON */ yyval = PyObject_CallMethod(rdflib, "Variable", "O", PyObject_CallMethod(rdflib, "BNode","")); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } case 193: { /* from SPARQLTurtleSuperSet.bgen.frag, line 491 * BlankNode: BLANK_NODE_LABEL */ PyObject *lang = PySequence_GetSlice(value_ptr[1], 2, PyString_GET_SIZE(value_ptr[1])); yyval = PyObject_CallMethod(rdflib, "BNode", "O",lang); Py_XDECREF(lang); if (self->verbose) { fprintf(stderr, "--BlankNode("); PyObject_Print(value_ptr[1], stderr, Py_PRINT_RAW); fprintf(stderr, ")\n"); } Py_DECREF(value_ptr[1]); break; } } if (!yyval) { lexer_free(lexer); return NULL; } *++value_ptr = yyval; if (self->verbose) print_state_stack(state_stack, state_ptr); /* Now "shift" the result of the reduction. */ /* Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ yyn = derives[yyn] - YYNTBASE; yystate = goto_idx[yyn] + *state_ptr; if (yystate >= 0 && yystate <= YYLAST && yycheck[yystate] == *state_ptr) { yystate = yytable[yystate]; } else { yystate = default_goto[yyn]; } continue; } else if (yyn == YYFINAL) { /* Hooray! Process complete. */ lexer_free(lexer); return value_ptr[-1]; } else if (yyn <= 0) { /* Now it is either 0 or YYFLAG */ return report_error(yystate, yylval, lexer); } /* Shift the lookahead token. 
*/
  TRACE("Shifting token %d (%s), ", yychar, token_names[yychar1]);
  /* consume the lookahead unless it was end-of-input */
  if (yychar != YYEOF) {
    yychar = YYEMPTY;
  }
  *++value_ptr = yylval;
  yystate = yyn;
  continue;
  }
  /* should never get here */
  Py_INCREF(Py_None);
  lexer_free(lexer);
  return Py_None;
}

/** lexer routines ****************************************************/

/* Allocate and initialize a lexer over `text`.
 * The argument is coerced to a unicode object (default coercion rules);
 * positions are raw Py_UNICODE pointers into that buffer, so the lexer
 * keeps a strong reference to the text for its whole lifetime.
 * Returns NULL with a Python exception set on failure. */
static lexerobject *lexer_new(PyObject *text)
{
  lexerobject *lexer;
  lexer = PyMem_New(lexerobject, 1);
  if (lexer == NULL) {
    PyErr_NoMemory();
    return NULL;
  }
  /* attempt to coerce given object to unicode using default rules */
  lexer->text = PyUnicode_FromObject(text);
  if (lexer->text == NULL) {
    PyMem_Free(lexer);
    return NULL;
  }
  lexer->position = PyUnicode_AS_UNICODE(lexer->text);
  lexer->end = lexer->position + PyUnicode_GET_SIZE(lexer->text);
  lexer->state = LEXER_START_STATE;
  /* create initial backtracking stack */
  lexer->positions = PyMem_New(Py_UNICODE *, LEXER_INITIAL_BACKTRACKS);
  if (lexer->positions == NULL) {
    PyErr_NoMemory();
    Py_DECREF(lexer->text);
    PyMem_Free(lexer);
    return NULL;
  }
  lexer->allocated = LEXER_INITIAL_BACKTRACKS;
  lexer->backtracks = 0;
  return lexer;
}

/* Release the backtrack stack, the text reference and the lexer itself. */
static void lexer_free(lexerobject *lexer)
{
  PyMem_Free(lexer->positions);
  Py_DECREF(lexer->text);
  PyMem_Free(lexer);
}

/* Push the current scan position onto the backtrack stack, growing the
 * stack with list-style mild over-allocation when full.
 * Returns 0 on success, -1 (with MemoryError set) on failure. */
static int lexer_save_position(lexerobject *lexer)
{
  Py_UNICODE **positions;
  size_t new_allocated;
  int allocated, newsize;
  /* Bypass realloc() when a previous overallocation is large enough
     to accommodate the newsize. */
  newsize = lexer->backtracks + 1;
  allocated = lexer->allocated;
  positions = lexer->positions;
  if (newsize >= allocated) {
    /* This over-allocates proportional to the list size, making room
     * for additional growth.  The over-allocation is mild, but is
     * enough to give linear-time amortized behavior over a long
     * sequence of appends() in the presence of a poorly-performing
     * system realloc().
     * The growth pattern is:  0, 4, 8, 16, 25, 35, 46, 58, 72, 88, ...
     */
    new_allocated = (newsize >> 3) + (newsize < 9 ? 3 : 6) + newsize;
    /* NOTE: PyMem_Resize assigns the (possibly NULL) result back into
     * `positions`; on failure lexer->positions is left untouched. */
    if (PyMem_Resize(positions, Py_UNICODE *, new_allocated) == NULL) {
      PyErr_NoMemory();
      return -1;
    }
    lexer->allocated = new_allocated;
    lexer->positions = positions;
  }
  lexer->positions[lexer->backtracks] = lexer->position;
  lexer->backtracks = newsize;
  return 0;
}

/* Pop the most recently saved position and make it current.
 * Precondition (assert only): at least one saved position exists. */
static Py_UNICODE *lexer_restore_position(lexerobject *lexer)
{
  assert(lexer->backtracks > 0);
  lexer->position = lexer->positions[--lexer->backtracks];
  return lexer->position;
}

/* Test character `ch` against a compiled charset program `set`.
 * `ok` is the value to return on membership (callers pass 1 for a
 * positive charset, 0 for a negated one); non-membership yields !ok.
 * Returns -1 on a corrupt opcode. */
static int lexer_charset(parserobject *self, Py_UCS4 *set, Py_UCS4 ch, int ok)
{
  unsigned char *charset;
  /* check if character is a member of the given set */
  /* Note, the tests are stored sorted to allow for quick exits */
  for (;;) {
    switch (*set++) {
    case LEXER_CHARSET_LITERAL:
      /* one literal code point follows; entries are sorted, so a
       * smaller ch can never match a later entry */
      REGEX_TRACE("CHARSET_LITERAL, %d == %d\n", ch, set[0]);
      if (ch < set[0]) return !ok;
      else if ((Py_UCS4)ch == set[0]) return ok;
      set++;
      break;
    case LEXER_CHARSET_RANGE:
      /* inclusive [low, high] pair follows */
      REGEX_TRACE("CHARSET_RANGE, %d <= %d <= %d\n", set[0], ch, set[1]);
      if (ch < set[0]) return !ok;
      else if (ch <= set[1]) return ok;
      set += 2;
      break;
    case LEXER_CHARSET_SMALL:
      /* index of a 256-bit bitmap covering Latin-1 follows */
      REGEX_TRACE("CHARSET_SMALL, index=%d\n", set[0]);
      charset = lexer_charsets[*set++];
      if (ch < 256 && (charset[ch >> 3] & (1 << (ch & 7)))) return ok;
      break;
    case LEXER_CHARSET_BIG:
      /* index of a blockmap follows; maps the high byte of ch to a
       * per-block bitmap (BMP only) */
      REGEX_TRACE("CHARSET_BIG, index=%d\n", set[0]);
      charset = lexer_charsets[lexer_blockmaps[*set++][ch >> 8]];
      if (ch < 65536 && charset[(ch & 255) >> 3] & (1 << (ch & 7))) return ok;
      break;
    case LEXER_CHARSET_FAILURE:
      /* nothing matched in charset */
      REGEX_TRACE("CHARSET_FAILURE\n");
      return !ok;
    default:
      REGEX_TRACE("**INTERNAL CHARSET ERROR**\n");
      return -1;
    }
  }
}

/* return values: 1 -> successful match, 0 -> no match, -1 -> error */
#ifdef Py_UNICODE_WIDE
#define GET_CHAR_AND_ADVANCE() ch = *ptr++;
#else
/* narrow build: combine a UTF-16 surrogate pair into one code point */
#define GET_CHAR_AND_ADVANCE() \
  if ((0xD800 <= ptr[0] && ptr[0] <= 0xDBFF) && \
      (0xDC00 <= ptr[1] && ptr[1] <= 0xDFFF)) { \
    ch = (((ptr[0] & 0x03FF) << 10) | (ptr[1] & 0x03FF)) + 0x00010000; \
    ptr += 2; \
  } else { \
    ch = *ptr++; \
  }
#endif

/* Interpret the compiled regex bytecode `pattern` against the lexer's
 * current position.  On success (return 1) lexer->position is advanced
 * past the match; returns 0 on mismatch and -1 on internal error.
 * Recurses for assertions, alternation branches and repeat bodies. */
static int lexer_match(parserobject *self, lexerobject *lexer, Py_UCS4 *pattern)
{
  Py_UNICODE *ptr = lexer->position;
  Py_UNICODE *end;
  Py_UCS4 ch;
  int i, count;
  REGEX_TRACE("LEXER_MATCH, position %d\n", lexer->position - PyUnicode_AS_UNICODE(lexer->text));
  while (1) {
    switch (*pattern++) {
    case LEXER_OP_FAILURE:
      /* immediate failure */
      REGEX_TRACE("OP_FAILURE\n");
      return 0;
    case LEXER_OP_SUCCESS:
      /* end of pattern */
      REGEX_TRACE("OP_SUCCESS\n");
      lexer->position = ptr;
      return 1;
    case LEXER_OP_BOL:
      /* beginning of line */
      REGEX_TRACE("OP_BOL\n");
      if (ptr == PyUnicode_AS_UNICODE(lexer->text) || ptr[-1] == '\n') break;
      return 0;
    case LEXER_OP_EOL:
      /* end of line */
      REGEX_TRACE("OP_EOL\n");
      if (ptr >= lexer->end || ptr[0] == '\n') break;
      return 0;
    case LEXER_OP_EOF:
      /* end of file */
      REGEX_TRACE("OP_EOF\n");
      if (ptr >= lexer->end) break;
      return 0;
    case LEXER_OP_ANY:
      /* match anything (except a newline) */
      REGEX_TRACE("OP_ANY\n");
      if (ptr >= lexer->end || ptr[0] == '\n') return 0;
      ptr++;
      break;
    case LEXER_OP_LITERAL:
      /* match literal character (one operand follows) */
      if (ptr >= lexer->end) return 0;
      GET_CHAR_AND_ADVANCE();
      REGEX_TRACE("OP_LITERAL, %d == %d\n", ch, pattern[0]);
      if (ch != pattern[0]) return 0;
      pattern++;
      break;
    case LEXER_OP_NOT_LITERAL:
      /* match anything that is not literal character */
      if (ptr >= lexer->end) return 0;
      GET_CHAR_AND_ADVANCE();
      REGEX_TRACE("OP_NOT_LITERAL, %d != %d\n", ch, pattern[0]);
      if (ch == pattern[0]) return 0;
      pattern++;
      break;
    case LEXER_OP_CHARSET:
      /* match set member; pattern[0] is the skip over the charset body */
      if (ptr >= lexer->end) return 0;
      GET_CHAR_AND_ADVANCE();
      REGEX_TRACE("OP_CHARSET, skip %d\n", pattern[0]);
      i = lexer_charset(self, pattern + 1, ch, 1);
      if (i <= 0) return i;
      pattern += pattern[0];
      break;
    case LEXER_OP_NOT_CHARSET:
      /* match set non-member */
      if (ptr >= lexer->end) return 0;
      GET_CHAR_AND_ADVANCE();
      REGEX_TRACE("OP_NOT_CHARSET, skip %d\n", pattern[0]);
      i = lexer_charset(self, pattern + 1, ch, 0);
      if (i <= 0) return i;
      pattern += pattern[0];
      break;
    case LEXER_OP_ASSERT:
      /* lookahead assertion: match but do not consume input */
      REGEX_TRACE("OP_ASSERT, skip %d\n", pattern[0]);
      lexer->position = ptr;
      i = lexer_match(self, lexer, pattern + 1);
      if (i <= 0) return i;
      pattern += pattern[0];
      break;
    case LEXER_OP_BRANCH:
      /* alternation: try every branch, keep the longest match */
      end = NULL;
      count = 0;
      while (pattern[0]) {
        /* reset start position each time through */
        REGEX_TRACE("OP_BRANCH %d, skip %d\n", count++, pattern[0]);
        lexer->position = ptr;
        i = lexer_match(self, lexer, pattern + 1);
        if (i < 0) return i;
        else if (i && lexer->position > end)
          /* successful match which is longer than the current best matched */
          end = lexer->position;
        /* advance to the next pattern */
        pattern += pattern[0];
      }
      /* advance pattern past NULL */
      pattern++;
      /* advance to the best matching position if there was a match */
      if (end) {
        lexer->position = ptr = end;
        break;
      }
      return 0;
    case LEXER_OP_REPEAT:
      /* repetition, greedy with backtracking */
      /* <1=min> item */
      {
        Py_UCS4 *item = pattern + 2;
        Py_UCS4 *next = pattern + pattern[0];
        int minimum = pattern[1];
        int backtracks;
        lexer->position = ptr;
        for (count = 0, i = 1; i == 1 && count < minimum; count++) {
          REGEX_TRACE("OP_REPEAT, min %d, now %d\n", minimum, count);
          i = lexer_match(self, lexer, item);
        }
        /* either internal error or failed minimum matches */
        if (i <= 0) return i;
        backtracks = lexer->backtracks;
        /* match as many items as possible */
        for (; i == 1; count++) {
          REGEX_TRACE("OP_REPEAT, now %d\n", count);
          if (lexer_save_position(lexer) < 0) return -1;
          i = lexer_match(self, lexer, item);
        }
        if (i < 0) {
          /* internal error */
          lexer->backtracks = backtracks;
          return i;
        }
        /* backtracking assert of tail match until success */
        do {
          REGEX_TRACE("OP_REPEAT, now %d\n", count);
          /* update position to previous successful match */
          ptr = lexer_restore_position(lexer);
          if (ptr == NULL) return -1;
          i = lexer_match(self, lexer, next);
        } while (i == 0 && --count > minimum);
        /* discard remaining backtrack positions */
        lexer->backtracks = backtracks;
        if (i <= 0) {
          return i;
        }
        pattern = next;
      }
      break;
    case LEXER_OP_REPEAT_RANGE:
      /* repetition, bounded */
      /* <1=min> <2=max> item */
      {
        Py_UCS4 *item = pattern + 3;
        Py_UCS4 *next = pattern + pattern[0];
        int minimum = pattern[1];
        int maximum = pattern[2];
        int backtracks;
        lexer->position = ptr;
        for (count = 0, i = 1; i == 1 && count < minimum; count++) {
          REGEX_TRACE("OP_REPEAT_RANGE, min %d, now %d\n", minimum, count);
          i = lexer_match(self, lexer, item);
        }
        /* either internal error or failed minimum matches */
        if (i <= 0) return i;
        backtracks = lexer->backtracks;
        /* consume up to 'maximum' matches */
        for (; i == 1 && count < maximum; count++) {
          REGEX_TRACE("OP_REPEAT_RANGE, max %d, now %d\n", maximum, count);
          if (lexer_save_position(lexer) < 0) return -1;
          i = lexer_match(self, lexer, item);
        }
        if (i < 0) {
          /* internal error */
          lexer->backtracks = backtracks;
          return i;
        }
        /* maximum matches reached, update saved position */
        if (i == 1) ptr = lexer->position;
        /* backtracking assert of tail match until success */
        do {
          REGEX_TRACE("OP_REPEAT_RANGE, now %d\n", count);
          if (i == 0) {
            /* update position to last successful match */
            ptr = lexer_restore_position(lexer);
            if (ptr == NULL) return -1;
          }
          i = lexer_match(self, lexer, next);
        } while (i == 0 && --count > minimum);
        /* discard remaining backtrack positions */
        lexer->backtracks = backtracks;
        if (i <= 0) return i;
        pattern = next;
      }
      break;
    default:
      REGEX_TRACE("**INTERNAL MATCH ERROR**\n");
      return -1;
    }
  }
}

/* Scan the next token for the parser: run every pattern of the current
 * lexer state and (continued below) select the longest-match action. */
static int parser_yylex(parserobject *self, lexerobject *lexer, PyObject **yylval)
{
  int yychar = YYEMPTY;
  int yylen;
  Py_UNICODE *yytext = lexer->position;
  while (yytext < lexer->end && yychar == YYEMPTY) {
    Py_UNICODE *best_end = NULL;
    int yyaccept = 0;
    int i;
    Py_UCS4 **patterns = (Py_UCS4 **)lexer_patterns[lexer->state];
    const int *actions = lexer_actions[lexer->state];
    REGEX_TRACE("Using patterns from lexer state %d\n", lexer->state);
    for (i = 0; patterns[i]; i++) {
      int matched;
      /* reset position each 
time through */ lexer->position = yytext; REGEX_TRACE("--- pattern %d...\n", i); matched = lexer_match(self, lexer, patterns[i]); if (matched > 0 && lexer->position > best_end) { /* successful match which is longer than the current best matched */ best_end = lexer->position; yyaccept = i; } else if (matched < 0) { /* internal error */ REGEX_TRACE("--- pattern %d internal error\n", i); PyErr_SetString(PyExc_RuntimeError, "internal error in regular expression engine"); return -1; } REGEX_TRACE("--- pattern %d %s\n", i, matched ? "success" : "failed"); } if (best_end == NULL) { /* no matches */ lexer->position = yytext; lexer_error(lexer); return -1; } lexer->position = best_end; yylen = best_end - yytext; /* get the action block for this match */ switch (actions[yyaccept]) { case 0: { /* from SPARQLLexerPatterns.bgen.frag, line 70 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[70], "); lexer->state = LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = GREATER_THAN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 1: { /* from SPARQLLexerPatterns.bgen.frag, line 74 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[74], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = Q_IRI_CONTENT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 2: { /* from SPARQLLexerPatterns.bgen.frag, line 77 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[77], "); lexer->state = 
LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); break; } case 3: { /* from SPARQLLexerPatterns.bgen.frag, line 13 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[13], "); lexer->state = LEXER_STRING_MODE_SHORT_1; TRACE("switching to start condition STRING_MODE_SHORT_1, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_1; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 4: { /* from SPARQLLexerPatterns.bgen.frag, line 17 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[17], "); lexer->state = LEXER_STRING_MODE_LONG_1; TRACE("switching to start condition STRING_MODE_LONG_1, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_2; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 5: { /* from SPARQLLexerPatterns.bgen.frag, line 21 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[21], "); lexer->state = LEXER_STRING_MODE_SHORT_2; TRACE("switching to start condition STRING_MODE_SHORT_2, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_3; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 6: { /* from SPARQLLexerPatterns.bgen.frag, line 25 */ TRACE("using rule from 
SPARQLLexerPatterns.bgen.frag[25], "); lexer->state = LEXER_STRING_MODE_LONG_2; TRACE("switching to start condition STRING_MODE_LONG_2, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_4; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 7: { /* from SPARQLLexerPatterns.bgen.frag, line 29 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[29], "); lexer->state = LEXER_IRI_MODE; TRACE("switching to start condition IRI_MODE, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LESS_THAN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 8: { /* from SPARQLLexerPatterns.bgen.frag, line 34 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[34], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LANGTAG; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 9: { /* from SPARQLLexerPatterns.bgen.frag, line 37 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[37], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = NIL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); 
PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 10: { /* from SPARQLLexerPatterns.bgen.frag, line 40 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[40], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = ANON; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 11: { /* from SPARQLLexerPatterns.bgen.frag, line 43 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[43], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = PNAME_LN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 12: { /* from SPARQLLexerPatterns.bgen.frag, line 46 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[46], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = PNAME_NS; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 13: { /* from SPARQLLexerPatterns.bgen.frag, line 49 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[49], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = BLANK_NODE_LABEL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* 
update the saved position */ yytext = lexer->position; break; } case 14: { /* from SPARQLLexerPatterns.bgen.frag, line 52 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[52], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = VARNAME; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 15: { /* from SPARQLLexerPatterns.bgen.frag, line 56 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[56], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LANGTAG; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 16: { /* from SPARQLLexerPatterns.bgen.frag, line 59 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[59], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = INTEGER; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 17: { /* from SPARQLLexerPatterns.bgen.frag, line 62 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[62], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DECIMAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ 
yytext = lexer->position; break; } case 18: { /* from SPARQLLexerPatterns.bgen.frag, line 65 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[65], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOUBLE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 19: { /* from SPARQLLexerPatterns.bgen.frag, line 121 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[121], "); /* skip over the matched text */ yytext = lexer->position; break; } case 20: { /* from SPARQLLexerPatterns.bgen.frag, line 123 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[123], "); /* skip over the matched text */ yytext = lexer->position; break; } case 21: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 3 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[3], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = UNION; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 22: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 6 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[6], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = ASTERISK; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 23: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 9 */ TRACE("using 
rule from SPARQLLiteralLexerPatterns.bgen.frag[9], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = BASE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 24: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 12 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[12], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = PREFIX; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 25: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 15 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[15], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = SELECT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 26: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 18 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[18], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DISTINCT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 27: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 
21 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[21], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = FROM; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 28: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 24 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[24], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = NAMED; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 29: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 27 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[27], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = OPTIONAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 30: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 30 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[30], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = FILTER; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 31: { /* from 
SPARQLLiteralLexerPatterns.bgen.frag, line 33 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[33], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = GRAPH; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 32: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 36 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[36], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = WHERE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 33: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 39 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[39], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = ORDER; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 34: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 42 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[42], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = BY; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 
35: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 45 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[45], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = ASC; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 36: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 48 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[48], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = ASK; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 37: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 51 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[51], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = CONSTRUCT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 38: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 54 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[54], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DESCRIBE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = 
lexer->position; break; } case 39: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 57 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[57], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DESC; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 40: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 60 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[60], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LIMIT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 41: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 63 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[63], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = OFFSET; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 42: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 66 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[66], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STR; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved 
position */ yytext = lexer->position; break; } case 43: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 69 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[69], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LANG; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 44: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 72 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[72], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LANGMATCHES; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 45: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 75 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[75], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DATATYPE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 46: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 78 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[78], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = isIRI; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); 
PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 47: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 81 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[81], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = isURI; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 48: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 84 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[84], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = isBLANK; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 49: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 87 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[87], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = isLITERAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 50: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 90 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[90], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = BOUND; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' 
(%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 51: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 93 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[93], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = REGEX; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 52: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 96 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[96], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = TRUE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 53: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 99 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[99], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = FALSE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 54: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 102 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[102], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = A; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); 
TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 55: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 105 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[105], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = MINUS; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 56: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 108 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[108], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = PLUS; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 57: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 111 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[111], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOUBLE_AMPERSAND; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 58: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 114 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[114], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOUBLE_PIPE; if (self->verbose) { 
char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 59: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 117 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[117], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = BANG; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 60: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 120 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[120], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOUBLE_HAT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 61: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 123 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[123], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = COMMA; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 62: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 126 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[126], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = 
COLON; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 63: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 129 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[129], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = QUESTION_MARK; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 64: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 132 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[132], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOLLAR; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 65: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 135 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[135], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = FORWARDSLASH; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 66: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 138 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[138], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { 
PyErr_NoMemory(); return -1; } yychar = LEFT_PAREN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 67: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 141 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[141], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = RIGHT_PAREN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 68: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 144 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[144], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LEFT_SQUARE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 69: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 147 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[147], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = RIGHT_SQUARE; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 70: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 150 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[150], "); /* create the Python object for the matched text */ *yylval = 
PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = EQUALITY_OP; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 71: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 153 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[153], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = NOT_EQUAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 72: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 156 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[156], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LESS_THAN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 73: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 159 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[159], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = GREATER_THAN; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 74: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 162 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[162], "); /* create the 
Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LESS_THAN_EQUAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 75: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 165 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[165], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = GREATER_THAN_EQUAL; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 76: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 168 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[168], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = SEMICOLON; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 77: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 171 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[171], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = LEFT_CURLY; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 78: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 174 */ TRACE("using rule from 
SPARQLLiteralLexerPatterns.bgen.frag[174], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = RIGHT_CURLY; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 79: { /* from SPARQLLiteralLexerPatterns.bgen.frag, line 177 */ TRACE("using rule from SPARQLLiteralLexerPatterns.bgen.frag[177], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = DOT; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 80: { /* from SPARQLLexerPatterns.bgen.frag, line 92 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[92], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL2; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 81: { /* from SPARQLLexerPatterns.bgen.frag, line 95 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[95], "); lexer->state = LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_3; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = 
lexer->position; break; } case 82: { /* from SPARQLLexerPatterns.bgen.frag, line 83 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[83], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL1; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 83: { /* from SPARQLLexerPatterns.bgen.frag, line 86 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[86], "); lexer->state = LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_1; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 84: { /* from SPARQLLexerPatterns.bgen.frag, line 111 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[111], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_LONG2; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 85: { /* from SPARQLLexerPatterns.bgen.frag, line 114 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[114], "); lexer->state = LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = 
STRING_LITERAL_DELIMETER_4; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 86: { /* from SPARQLLexerPatterns.bgen.frag, line 102 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[102], "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_LONG1; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } case 87: { /* from SPARQLLexerPatterns.bgen.frag, line 105 */ TRACE("using rule from SPARQLLexerPatterns.bgen.frag[105], "); lexer->state = LEXER_INITIAL; TRACE("switching to start condition INITIAL, "); /* create the Python object for the matched text */ *yylval = PyUnicode_FromUnicode(yytext, yylen); if (*yylval == NULL) { PyErr_NoMemory(); return -1; } yychar = STRING_LITERAL_DELIMETER_2; if (self->verbose) { char *repr = unicode_escape(yytext, yylen); TRACE("accepting '%s' (%d)\n", repr, yychar); PyMem_Del(repr); } /* update the saved position */ yytext = lexer->position; break; } } } if (yychar == YYEMPTY) { /* Reached end of input */ yychar = YYEOF; } return yychar; } /** Type Object *******************************************************/ static int parser_traverse(parserobject *self, visitproc visit, void *arg) { int rv; if (self->dict) { rv = visit(self->dict, arg); if (rv != 0) return rv; } return 0; } static int parser_clear(parserobject *self) { PyObject *tmp; if (self->dict) { tmp = self->dict; self->dict = NULL; Py_DECREF(tmp); } return 0; } static void parser_dealloc(parserobject *self) { parser_clear(self); self->ob_type->tp_free((PyObject *) self); } static int parser_init(parserobject *self, PyObject *args, PyObject *kwds) { 
PyObject *debug=NULL; static char *kwlist[] = { "debug", NULL }; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|O:" PARSER_NAME, kwlist, &debug)) return -1; if (debug) { self->verbose = PyObject_IsTrue(debug); } return 0; } static PyObject *parser_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { parserobject *self; self = (parserobject *) type->tp_alloc(type, 0); if (self != NULL) { self->dict = PyDict_New(); if (self->dict == NULL) { Py_DECREF(self); return NULL; } self->verbose = 0; } return (PyObject *) self; } static PyMethodDef parser_methods[] = { { "parse", (PyCFunction) parser_parse, METH_O, parse_doc }, { NULL, NULL } }; static PyMemberDef parser_members[] = { { "debug", T_INT, offsetof(parserobject, verbose) }, { NULL } }; static char parser_doc[] = PARSER_NAME "\ ([debug]) -> parser\n\ Create a new parser object.\n\ \n\ The optional debug argument, when true, enables the builtin trace facility.\n\ The trace facility uses stderr to display each step taken by the parser."; static PyTypeObject Parser_Type = { /* PyObject_HEAD */ PyObject_HEAD_INIT(NULL) /* ob_size */ 0, /* tp_name */ PROJECT_NAME "." 
PARSER_NAME, /* tp_basicsize */ sizeof(parserobject), /* tp_itemsize */ 0, /* tp_dealloc */ (destructor) parser_dealloc, /* tp_print */ (printfunc) 0, /* tp_getattr */ (getattrfunc) 0, /* tp_setattr */ (setattrfunc) 0, /* tp_compare */ (cmpfunc) 0, /* tp_repr */ (reprfunc) 0, /* tp_as_number */ (PyNumberMethods *) 0, /* tp_as_sequence */ (PySequenceMethods *) 0, /* tp_as_mapping */ (PyMappingMethods *) 0, /* tp_hash */ (hashfunc) 0, /* tp_call */ (ternaryfunc) 0, /* tp_str */ (reprfunc) 0, /* tp_getattro */ (getattrofunc) 0, /* tp_setattro */ (setattrofunc) 0, /* tp_as_buffer */ (PyBufferProcs *) 0, /* tp_flags */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_doc */ (char *) parser_doc, /* tp_traverse */ (traverseproc) parser_traverse, /* tp_clear */ (inquiry) parser_clear, /* tp_richcompare */ (richcmpfunc) 0, /* tp_weaklistoffset */ 0, /* tp_iter */ (getiterfunc) 0, /* tp_iternext */ (iternextfunc) 0, /* tp_methods */ (PyMethodDef *) parser_methods, /* tp_members */ (PyMemberDef *) parser_members, /* tp_getset */ (PyGetSetDef *) 0, /* tp_base */ (PyTypeObject *) 0, /* tp_dict */ (PyObject *) 0, /* tp_descr_get */ (descrgetfunc) 0, /* tp_descr_set */ (descrsetfunc) 0, /* tp_dictoffset */ offsetof(parserobject, dict), /* tp_init */ (initproc) parser_init, /* tp_alloc */ (allocfunc) 0, /* tp_new */ (newfunc) parser_new, /* tp_free */ 0, }; /* Helper functions */ /* caller is responsible for releasing the memory */ static char *unicode_escape(Py_UNICODE *s, int len) { static const char *hexdigit = "0123456789ABCDEF"; char *repr, *p; int i, size; /* Do one pass to get the repr'ed size */ size = 1; /* zero terminator */ for (i = 0; i < len; i++) { #ifdef Py_UNICODE_WIDE if (s[i] >= 65536) size += 10; /* \UHHHHHHHH */ else #endif if (s[i] >= 256) size += 6; /* \uHHHH */ else if (s[i] == 9 || s[i] == 10 || s[i] == 13) size += 2; /* \t \n \r */ else if (s[i] < 32 || s[i] >= 128) size += 4; /* \xHH */ else size++; /* printable US-ASCII */ } repr = p = PyMem_New(char, size 
+ 1); if (repr == NULL) return NULL; while (len-- > 0) { Py_UNICODE ch = *s++; #ifdef Py_UNICODE_WIDE /* Map 32-bit characters to '\Uxxxxxxxx' */ if (ch >= 65536) { *p++ = '\\'; *p++ = 'U'; *p++ = hexdigit[(ch >> 28) & 0xf]; *p++ = hexdigit[(ch >> 24) & 0xf]; *p++ = hexdigit[(ch >> 20) & 0xf]; *p++ = hexdigit[(ch >> 16) & 0xf]; *p++ = hexdigit[(ch >> 12) & 0xf]; *p++ = hexdigit[(ch >> 8) & 0xf]; *p++ = hexdigit[(ch >> 4) & 0xf]; *p++ = hexdigit[ch & 15]; } /* Map 16-bit characters to '\uxxxx' */ else #endif if (ch >= 256) { *p++ = '\\'; *p++ = 'u'; *p++ = hexdigit[(ch >> 12) & 0xf]; *p++ = hexdigit[(ch >> 8) & 0xf]; *p++ = hexdigit[(ch >> 4) & 0xf]; *p++ = hexdigit[ch & 15]; } /* Map special whitespace to '\t', \n', '\r' */ else if (ch == 9) { *p++ = '\\'; *p++ = 't'; } else if (ch == 10) { *p++ = '\\'; *p++ = 'n'; } else if (ch == 13) { *p++ = '\\'; *p++ = 'r'; } /* Map non-printable US ASCII to '\xhh' */ else if (ch < 32 || ch >= 128) { *p++ = '\\'; *p++ = 'x'; *p++ = hexdigit[(ch >> 4) & 0xf]; *p++ = hexdigit[ch & 15]; } /* Copy everything else as-is */ else *p++ = (char) ch; } *p = '\0'; return repr; } static void calculate_position(lexerobject *lexer, int *line, int *column) { /* Determine line and column numbers */ Py_UNICODE *p; *line = 1; *column = 1; for (p = PyUnicode_AS_UNICODE(lexer->text); p < lexer->end; p++) { if ((char)*p == '\n') { *line += 1; *column = 1; } else { *column += 1; } } } static const char error_format_str[] = "parse error at line %d, column %d: matched '%s'"; static const char error_format_eof_str[] = "parse error at line %d, column %d: reached end-of-input"; static PyObject *report_error(int state, PyObject* lval, lexerobject *lexer) { int line, column; int ruleno = action_idx[state]; char *matched = NULL; if (lval) { matched = unicode_escape(PyUnicode_AS_UNICODE(lval), PyUnicode_GET_SIZE(lval)); if (matched == NULL) return NULL; } calculate_position(lexer, &line, &column); Py_DECREF(lexer->text); if (ruleno > YYFLAG && ruleno < 
YYLAST) { /* There are expected tokens */ int x, count; int size = 60; /* Initial format string */ char *msg; /* Start X at -yyn if nec to avoid negative indexes in yycheck. */ for (x = (ruleno < 0 ? -ruleno : 0); x < (sizeof(token_names) / sizeof(char *)); x++) { if (yycheck[x + ruleno] == x) { size += strlen(token_names[x]) + 15; } } msg = PyMem_New(char, size); if (msg == NULL) { PyMem_Del(matched); return NULL; } if (lval) { strcpy(msg, error_format_str); } else { strcpy(msg, error_format_eof_str); } count = 0; for (x = (ruleno < 0 ? -ruleno : 0); x < (sizeof(token_names) / sizeof(char *)); x++) { if (yycheck[x + ruleno] == x) { strcat(msg, count == 0 ? ", expecting '" : " or '"); strcat(msg, token_names[x]); strcat(msg, "'"); count++; } } if (matched) { PyErr_Format(PyExc_SyntaxError, msg, line, column, matched); } else { PyErr_Format(PyExc_SyntaxError, msg, line, column); } PyMem_Del(msg); } else { if (matched) { PyErr_Format(PyExc_SyntaxError, error_format_str, line, column, matched); } else { PyErr_Format(PyExc_SyntaxError, error_format_eof_str, line, column); } } if (matched) { PyMem_Del(matched); } return NULL; } static const char lexer_error_str[] = "lexical error at line %d, column %d: no action found for '%s'"; static void lexer_error(lexerobject *lexer) { int line, column; char *repr = unicode_escape(lexer->position, (lexer->end - lexer->position)); if (repr == NULL) return; calculate_position(lexer, &line, &column); PyErr_Format(PyExc_SyntaxError, lexer_error_str, line, column, repr); PyMem_Del(repr); return; } static void print_reduce(int ruleno) { int count; const int *token; PySys_WriteStderr("Reducing via rule %d (%s), ", ruleno, rule_info[ruleno]); /* print the symbols being reduced and their result. 
*/ count = ruleno; token = rhs_tokens; while (--count) while (*++token); while (*++token) { PySys_WriteStderr("%s ", token_names[*token]); } PySys_WriteStderr("-> %s\n", token_names[derives[ruleno]]); } static void print_state_stack(int *stack, int *end) { int *curr = stack; PySys_WriteStderr("state stack now"); while (curr <= end) { PySys_WriteStderr(" %d", *curr++); } PySys_WriteStderr("\n"); } /** Interactive parser ************************************************/ #define CONSOLE_NAME PARSER_NAME "Console" typedef struct { PyObject_HEAD parserobject *parser; PyObject *dict; } consoleobject; static PyObject *console_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { consoleobject *self; self = (consoleobject *) type->tp_alloc(type, 0); if (self != NULL) { PyObject *args = Py_BuildValue("(i)", 1); if (args == NULL) { Py_DECREF(self); return NULL; } self->parser = (parserobject *) parser_new(&Parser_Type, args, NULL); Py_DECREF(args); if (self->parser == NULL) { Py_DECREF(self); return NULL; } } return (PyObject *) self; } static int console_init(consoleobject *self, PyObject *args, PyObject *kwds) { PyObject *bases, *base, *result; int size, i; if (!PyArg_ParseTuple(args, ":" CONSOLE_NAME)) return -1; bases = self->ob_type->tp_bases; size = PyTuple_GET_SIZE(bases); for (i = 0; i < size; i++) { base = PyTuple_GET_ITEM(bases, i); result = PyObject_CallMethod(base, "__init__", "O", (PyObject *) self); if (result == NULL) return -1; Py_DECREF(result); } return 0; } static void console_dealloc(consoleobject *self) { Py_XDECREF(self->parser); self->ob_type->tp_free((PyObject *) self); } static PyObject *console_cmdloop(consoleobject *self, PyObject *args) { PyObject *result=NULL, *builtins=NULL, *readline=NULL, *old_completer=NULL; int stop; builtins = PyImport_ImportModule("__builtin__"); if (builtins == NULL) goto exit; readline = PyImport_ImportModule("readline"); if (readline == NULL) { if (!PyErr_ExceptionMatches(PyExc_ImportError)) goto exit; 
PyErr_Clear(); } else { #if PY_VERSION_HEX > 0x02030000 /* old_completer = readline.get_completer() */ old_completer = PyObject_CallMethod(readline, "get_completer", NULL); if (old_completer == NULL) goto exit; #endif /* readline.set_completer(self.complete) */ result = PyObject_GetAttrString((PyObject *) self, "complete"); if (result == NULL) goto exit; result = PyObject_CallMethod(readline, "set_completer", "N", result); if (result == NULL) goto exit; Py_DECREF(result); /* readline.parse_and_bind("tab: complete") */ result = PyObject_CallMethod(readline, "parse_and_bind", "s", "tab: complete"); if (result == NULL) goto exit; Py_DECREF(result); } stop = 0; do { result = PyObject_GetAttrString((PyObject *) self, "prompt"); if (result == NULL) goto finally; result = PyObject_CallMethod(builtins, "raw_input", "N", result); if (result == NULL) { if (PyErr_ExceptionMatches(PyExc_EOFError) || PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) { PyErr_Clear(); PySys_WriteStdout("\n"); Py_INCREF(Py_None); result = Py_None; } goto finally; } result = PyObject_CallMethod((PyObject *) self, "onecmd", "N", result); if (result == NULL) goto finally; stop = PyObject_IsTrue(result); Py_DECREF(result); } while (!stop); Py_INCREF(Py_None); result = Py_None; finally: #if PY_VERSION_HEX > 0x02030000 if (readline != NULL && old_completer != NULL) { PyObject *rv; rv = PyObject_CallMethod(readline, "set_completer", "O", old_completer); if (rv == NULL) { Py_XDECREF(result); result = rv; } else { Py_DECREF(rv); } } #endif exit: Py_XDECREF(old_completer); Py_XDECREF(readline); Py_XDECREF(builtins); return result; } static PyObject *console_emptyline(consoleobject *self, PyObject *noarg) { Py_INCREF(Py_None); return Py_None; } static char console_exit_doc[] = "terminate the console"; static PyObject *console_exit(consoleobject *self, PyObject *arg) { Py_INCREF(Py_True); return Py_True; } static char console_debug_doc[] = "sets or displays the debug level"; static PyObject 
*console_debug(consoleobject *self, PyObject *arg) { PyObject *verbose; if (PyObject_IsTrue(arg)) { if ((verbose = PyNumber_Int(arg)) == NULL) { if (PyErr_ExceptionMatches(PyExc_ValueError)) { PyErr_Clear(); PySys_WriteStdout("usage: debug \n"); Py_INCREF(Py_None); return Py_None; } return NULL; } self->parser->verbose = PyInt_AsLong(verbose); Py_DECREF(verbose); } PySys_WriteStdout("debug level is %d\n", self->parser->verbose); Py_INCREF(Py_None); return Py_None; } static char console_parse_doc[] = "parses an expression"; static PyObject *console_parse(consoleobject *self, PyObject *arg) { PyObject *result, *value; result = parser_parse(self->parser, arg); if (result == NULL) { PyObject *exc, *tb; if (!PyErr_ExceptionMatches(PyExc_SyntaxError)) return NULL; PyErr_Fetch(&exc, &value, &tb); if (value && value != Py_None) { result = PyObject_Str(value); } Py_XDECREF(exc); Py_XDECREF(value); Py_XDECREF(tb); if (result == NULL) return NULL; } value = PyObject_Str(result); Py_DECREF(result); if (value == NULL) return NULL; PySys_WriteStdout("%s\n", PyString_AsString(value)); Py_DECREF(value); Py_INCREF(Py_None); return Py_None; } static PyMethodDef console_methods[] = { { "cmdloop", (PyCFunction) console_cmdloop, METH_O }, { "emptyline", (PyCFunction) console_emptyline, METH_NOARGS }, { "do_exit", (PyCFunction) console_exit, METH_O, console_exit_doc }, { "do_quit", (PyCFunction) console_exit, METH_O, console_exit_doc }, { "do_debug", (PyCFunction) console_debug, METH_O, console_debug_doc }, { "do_parse", (PyCFunction) console_parse, METH_O, console_parse_doc }, { NULL } }; static PyTypeObject Console_Type = { /* PyObject_HEAD */ PyObject_HEAD_INIT(NULL) /* ob_size */ 0, /* tp_name */ "Console", /* tp_basicsize */ sizeof(consoleobject), /* tp_itemsize */ 0, /* tp_dealloc */ (destructor) console_dealloc, /* tp_print */ (printfunc) 0, /* tp_getattr */ (getattrfunc) 0, /* tp_setattr */ (setattrfunc) 0, /* tp_compare */ (cmpfunc) 0, /* tp_repr */ (reprfunc) 0, /* 
tp_as_number */ (PyNumberMethods *) 0, /* tp_as_sequence */ (PySequenceMethods *) 0, /* tp_as_mapping */ (PyMappingMethods *) 0, /* tp_hash */ (hashfunc) 0, /* tp_call */ (ternaryfunc) 0, /* tp_str */ (reprfunc) 0, /* tp_getattro */ (getattrofunc) 0, /* tp_setattro */ (setattrofunc) 0, /* tp_as_buffer */ (PyBufferProcs *) 0, /* tp_flags */ Py_TPFLAGS_DEFAULT, /* tp_doc */ (char *) 0, /* tp_traverse */ (traverseproc) 0, /* tp_clear */ (inquiry) 0, /* tp_richcompare */ (richcmpfunc) 0, /* tp_weaklistoffset */ 0, /* tp_iter */ (getiterfunc) 0, /* tp_iternext */ (iternextfunc) 0, /* tp_methods */ (PyMethodDef *) console_methods, /* tp_members */ (PyMemberDef *) 0, /* tp_getset */ (PyGetSetDef *) 0, /* tp_base */ (PyTypeObject *) 0, /* tp_dict */ (PyObject *) 0, /* tp_descr_get */ (descrgetfunc) 0, /* tp_descr_set */ (descrsetfunc) 0, /* tp_dictoffset */ offsetof(consoleobject, dict), /* tp_init */ (initproc) console_init, /* tp_alloc */ (allocfunc) 0, /* tp_new */ (newfunc) console_new, /* tp_free */ 0, }; static char console_doc[] = CONSOLE_NAME "\ ()\n\ Starts an interactive parser console."; static PyObject *module_console(PyObject *module, PyObject *args) { PyObject *console, *result; args = PyTuple_New(0); if (args == NULL) return NULL; console = PyObject_Call((PyObject *) &Console_Type, args, NULL); if (console == NULL) { Py_DECREF(args); return NULL; } result = console_cmdloop((consoleobject *) console, args); Py_DECREF(args); Py_DECREF(console); return result; } static PyMethodDef module_methods[] = { { CONSOLE_NAME, module_console, METH_NOARGS, console_doc }, { NULL } }; static void import_modules(void); #ifndef PyMODINIT_FUNC #define PyMODINIT_FUNC DL_EXPORT(void) #endif PyMODINIT_FUNC MODULE_INITFUNC(void) { PyObject *import, *class, *item, *module; if (PyType_Ready(&Parser_Type) < 0) return; /* Setup the console type's base classes */ import = PyImport_ImportModule("cmd"); if (import == NULL) return; class = PyObject_GetAttrString(import, "Cmd"); 
Py_DECREF(import); if (class == NULL) return; Console_Type.tp_base = &PyBaseObject_Type; Console_Type.tp_bases = Py_BuildValue("(ON)", class, &PyBaseObject_Type); if (Console_Type.tp_bases == NULL) return; if (PyType_Ready(&Console_Type) < 0) return; item = PyString_FromString(PARSER_NAME "> "); if (PyDict_SetItemString(Console_Type.tp_dict, "prompt", item) < 0) return; Py_DECREF(item); module = Py_InitModule(PARSER_NAME "c", module_methods); if (module == NULL) return; Py_INCREF(&Parser_Type); PyModule_AddObject(module, "new", (PyObject *) &Parser_Type); Py_INCREF(&Parser_Type); PyModule_AddObject(module, PARSER_NAME, (PyObject *) &Parser_Type); /* import the modules required for action routines */ import_modules(); } static PyObject *import_from(char *modulename, char *fromname) { PyObject *fromlist, *name, *module; fromlist = PyTuple_New(1); if (fromlist == NULL) return NULL; name = PyString_FromString(fromname); if (name == NULL) { Py_DECREF(fromlist); return NULL; } Py_INCREF(name); PyTuple_SET_ITEM(fromlist, 0, name); module = PyImport_ImportModuleEx(modulename, NULL, NULL, fromlist); Py_DECREF(fromlist); if (module == NULL) { Py_DECREF(name); return NULL; } fromlist = PyObject_GetAttr(module, name); Py_DECREF(module); Py_DECREF(name); return fromlist; } static void import_modules(void) { /* from rdflib.sparql.bison import IRIRef */ IRIRef = import_from("rdflib.sparql.bison", "IRIRef"); if (IRIRef == NULL) return; /* from rdflib.sparql.bison import Bindings */ Bindings = import_from("rdflib.sparql.bison", "Bindings"); if (Bindings == NULL) return; /* from rdflib.sparql.bison import Query */ Query = import_from("rdflib.sparql.bison", "Query"); if (Query == NULL) return; /* from rdflib.sparql.bison import QName */ QName = import_from("rdflib.sparql.bison", "QName"); if (QName == NULL) return; /* from rdflib.sparql.bison import GraphPattern */ GraphPattern = import_from("rdflib.sparql.bison", "GraphPattern"); if (GraphPattern == NULL) return; /* from 
rdflib.sparql.bison import FunctionLibrary */ FunctionLibrary = import_from("rdflib.sparql.bison", "FunctionLibrary"); if (FunctionLibrary == NULL) return; /* from rdflib.sparql.bison import Operators */ Operators = import_from("rdflib.sparql.bison", "Operators"); if (Operators == NULL) return; /* from rdflib.sparql.bison import Triples */ Triples = import_from("rdflib.sparql.bison", "Triples"); if (Triples == NULL) return; /* from rdflib.sparql.bison import Resource */ Resource = import_from("rdflib.sparql.bison", "Resource"); if (Resource == NULL) return; /* from rdflib.sparql.bison import Filter */ Filter = import_from("rdflib.sparql.bison", "Filter"); if (Filter == NULL) return; /* from rdflib.sparql.bison import Util */ Util = import_from("rdflib.sparql.bison", "Util"); if (Util == NULL) return; /* from rdflib.sparql.bison import Expression */ Expression = import_from("rdflib.sparql.bison", "Expression"); if (Expression == NULL) return; /* from rdflib.sparql.bison import SolutionModifier */ SolutionModifier = import_from("rdflib.sparql.bison", "SolutionModifier"); if (SolutionModifier == NULL) return; /* import rdflib */ rdflib = PyImport_ImportModule("rdflib"); if (rdflib == NULL) return; /* from rdflib import RDF */ RDF = import_from("rdflib", "RDF"); if (RDF == NULL) return; } rdflib-2.4.2/src/bison/SPARQLParser.output0000644000175000017500000051216311153616037017300 0ustar nachonachoTerminals which are not used WHITESPACE COLON QUESTION_MARK DOLLAR Grammar 1 Query: Prolog QueryTypes 2 QueryTypes: SelectQuery 3 | ConstructQuery 4 | DescribeQuery 5 | AskQuery 6 DescribeQuery: DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier 7 | DESCRIBE VAR_REFERENCES SolutionModifier 8 | DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier 9 | DESCRIBE VAR_REFERENCES WhereClause SolutionModifier 10 ConstructQuery: CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier 11 | CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY 
WhereClause SolutionModifier 12 Prolog: BaseDecl PrefixDeclList 13 | BaseDecl 14 | PrefixDeclList 15 | /* empty */ 16 PrefixDeclList: PrefixDecl 17 | PrefixDeclList PrefixDecl 18 PrefixDecl: PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN 19 | PREFIX PNAME_NS LESS_THAN GREATER_THAN 20 BaseDecl: BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN 21 AskQuery: ASK WhereClause 22 | ASK DataSetClauseList WhereClause 23 SelectQuery: SELECT VAR_REFERENCES WhereClause SolutionModifier 24 | SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier 25 | SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier 26 | SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier 27 VAR_REFERENCES: VariableReferenceList 28 | ASTERISK 29 VariableReferenceList: Var 30 | VariableReferenceList Var 31 IRIref: LESS_THAN Q_IRI_CONTENT GREATER_THAN 32 | PrefixedName 33 PrefixedName: PNAME_NS 34 | PNAME_LN 35 DataSetClauseList: DataSetClause 36 | DataSetClauseList DataSetClause 37 DataSetClause: FROM IRIref 38 | FROM NAMED IRIref 39 WhereClause: WHERE GroupGraphPattern 40 | GroupGraphPattern 41 SolutionModifier: /* empty */ 42 | OrderClause 43 | OrderClause LimitClause 44 | OrderClause LimitClause OffsetClause 45 | OrderClause OffsetClause LimitClause 46 | LimitClause OffsetClause 47 | OrderClause OffsetClause 48 | OffsetClause 49 | LimitClause 50 OrderClause: ORDER BY OrderConditionList 51 OrderConditionList: OrderCondition 52 | OrderConditionList OrderCondition 53 OrderCondition: ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 54 | DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 55 | FunctionCall 56 | BuiltInCall 57 | LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 58 | Var 59 LimitClause: LIMIT NumericLiteral 60 OffsetClause: OFFSET NumericLiteral 61 GroupGraphPattern: LEFT_CURLY RIGHT_CURLY 62 | LEFT_CURLY Triples GraphPatternList RIGHT_CURLY 63 | LEFT_CURLY Triples RIGHT_CURLY 64 | LEFT_CURLY GraphPatternList RIGHT_CURLY 65 GraphPatternList: 
GraphPattern 66 | GraphPatternList GraphPattern 67 GraphPattern: Filter Triples 68 | Filter DOT Triples 69 | Filter DOT 70 | Filter 71 | GraphPatternNotTriples Triples 72 | GraphPatternNotTriples DOT Triples 73 | GraphPatternNotTriples 74 | GraphPatternNotTriples DOT 75 GraphPatternNotTriples: OPTIONAL GroupGraphPattern 76 | GroupGraphPattern 77 | GroupGraphPattern AlternativeGroupGraphPatterns 78 | GRAPH Var GroupGraphPattern 79 | GRAPH BlankNode GroupGraphPattern 80 | GRAPH IRIref GroupGraphPattern 81 AlternativeGroupGraphPatterns: UNION GroupGraphPattern 82 | AlternativeGroupGraphPatterns UNION GroupGraphPattern 83 ConditionalOrExpression: ConditionalAndExpression 84 | ConditionalAndExpression ConditionalAndExpressionList 85 ConditionalAndExpressionList: DOUBLE_PIPE ConditionalAndExpression 86 | ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression 87 ConditionalAndExpression: RelationalExpression ValueLogicalList 88 | RelationalExpression 89 ValueLogicalList: DOUBLE_AMPERSAND RelationalExpression 90 | ValueLogicalList DOUBLE_AMPERSAND RelationalExpression 91 RelationalExpression: AdditiveExpression 92 | AdditiveExpression EQUALITY_OP AdditiveExpression 93 | AdditiveExpression NOT_EQUAL AdditiveExpression 94 | AdditiveExpression LESS_THAN AdditiveExpression 95 | AdditiveExpression GREATER_THAN AdditiveExpression 96 | AdditiveExpression LESS_THAN_EQUAL AdditiveExpression 97 | AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression 98 AdditiveExpression: MultiplicativeExpression 99 | MultiplicativeExpression MultiplicativeExpressionList 100 MultiplicativeExpressionList: PLUS MultiplicativeExpression 101 | MINUS MultiplicativeExpression 102 | MultiplicativeExpressionList MINUS MultiplicativeExpression 103 | MultiplicativeExpressionList PLUS MultiplicativeExpression 104 MultiplicativeExpression: UnaryExpression 105 | UnaryExpression UnaryExpressionList 106 UnaryExpressionList: ASTERISK UnaryExpression 107 | FORWARDSLASH UnaryExpression 108 | 
UnaryExpressionList ASTERISK UnaryExpression 109 | UnaryExpressionList FORWARDSLASH UnaryExpression 110 UnaryExpression: BANG PrimaryExpression 111 | PLUS PrimaryExpression 112 | MINUS PrimaryExpression 113 | PrimaryExpression 114 BuiltInCall: STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 115 | LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 116 | LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN 117 | DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 118 | BOUND LEFT_PAREN Var RIGHT_PAREN 119 | isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 120 | isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 121 | isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 122 | isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 123 | RegexExpression 124 RegexExpression: REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN 125 | REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN 126 FunctionCall: IRIref LEFT_PAREN ArgumentList RIGHT_PAREN 127 | IRIref NIL 128 ArgumentList: ConditionalOrExpression 129 | ConditionalOrExpression COMMA ArgumentList 130 PrimaryExpression: LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 131 | BuiltInCall 132 | IRIref 133 | FunctionCall 134 | RDFLiteral 135 | NumericLiteral 136 | BooleanLiteral 137 | BlankNode 138 | Var 139 Filter: FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN 140 | FILTER BuiltInCall 141 | FILTER FunctionCall 142 Triples: Triples DOT TriplesSameSubject 143 | Triples DOT 144 | TriplesSameSubject 145 TriplesSameSubject: Var PropertyListNotEmpty 146 | GraphTerm PropertyListNotEmpty 147 | LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList 148 | Collection PropertyListNotEmpty 149 | Collection 150 PropertyList: PropertyListNotEmpty 151 | /* empty */ 152 PropertyListNotEmpty: Verb ObjectList 153 | Verb ObjectList SEMICOLON PropertyList 154 ObjectList: GraphNode 155 | 
ObjectList COMMA GraphNode 156 GraphNode: Var 157 | TriplesNode 158 | GraphTerm 159 Verb: Var 160 | IRIref 161 | A 162 TriplesNode: Collection 163 | LEFT_SQUARE PropertyList RIGHT_SQUARE 164 Collection: LEFT_PAREN GraphNodeList RIGHT_PAREN 165 GraphNodeList: GraphNode 166 | GraphNodeList GraphNode 167 Var: VARNAME 168 GraphTerm: IRIref 169 | RDFLiteral 170 | NumericLiteral 171 | PLUS NumericLiteral 172 | MINUS NumericLiteral 173 | BooleanLiteral 174 | BlankNode 175 | NIL 176 NumericLiteral: INTEGER 177 | DECIMAL 178 | DOUBLE 179 RDFLiteral: String 180 | String LANGTAG 181 | String DOUBLE_HAT IRIref 182 BooleanLiteral: TRUE 183 | FALSE 184 String: STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 185 | STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 186 | STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 187 | STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 188 | STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 189 | STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 190 | STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 191 | STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 192 BlankNode: ANON 193 | BLANK_NODE_LABEL Terminals, with rules where they appear (-1) error (256) WHITESPACE (257) UNION (258) 81 82 COLON (259) Q_IRI_CONTENT (260) 18 20 31 PNAME_NS (261) 18 19 33 CONSTRUCT (262) 10 11 DESCRIBE (263) 6 7 8 9 PNAME_LN (264) 34 BLANK_NODE_LABEL (265) 193 VARNAME (266) 167 PREFIX (267) 18 19 ASTERISK (268) 28 106 108 DOT (269) 68 69 72 74 142 143 QUESTION_MARK (270) DOLLAR (271) BASE (272) 20 SELECT (273) 23 24 25 26 DISTINCT (274) 25 26 FROM (275) 37 38 NAMED (276) 38 OPTIONAL (277) 75 FILTER (278) 139 140 141 GRAPH (279) 78 79 80 WHERE (280) 39 ORDER (281) 50 BY (282) 50 ASC (283) 53 ASK (284) 21 22 DESC (285) 54 LIMIT (286) 59 OFFSET (287) 60 STR (288) 114 LANG (289) 115 LANGMATCHES (290) 116 DATATYPE (291) 117 isIRI (292) 119 isURI (293) 120 
isLITERAL (294) 122 isBLANK (295) 121 BOUND (296) 118 REGEX (297) 124 125 A (298) 161 TRUE (299) 182 FALSE (300) 183 DOUBLE_AMPERSAND (301) 89 90 DOUBLE_PIPE (302) 85 86 BANG (303) 110 DOUBLE_HAT (304) 181 COMMA (305) 116 124 125 129 155 FORWARDSLASH (306) 107 109 LEFT_PAREN (307) 53 54 57 114 115 116 117 118 119 120 121 122 124 125 126 130 139 164 RIGHT_PAREN (308) 53 54 57 114 115 116 117 118 119 120 121 122 124 125 126 130 139 164 LEFT_SQUARE (309) 147 163 RIGHT_SQUARE (310) 147 163 SEMICOLON (311) 153 INTEGER (312) 176 DECIMAL (313) 177 DOUBLE (314) 178 STRING_LITERAL_DELIMETER_1 (315) 184 188 STRING_LITERAL_DELIMETER_2 (316) 186 190 STRING_LITERAL_DELIMETER_3 (317) 185 189 STRING_LITERAL_DELIMETER_4 (318) 187 191 STRING_LITERAL1 (319) 184 STRING_LITERAL2 (320) 185 STRING_LITERAL_LONG1 (321) 186 STRING_LITERAL_LONG2 (322) 187 NIL (323) 127 175 ANON (324) 192 LANGTAG (325) 180 LEFT_CURLY (326) 10 11 61 62 63 64 RIGHT_CURLY (327) 10 11 61 62 63 64 PLUS (328) 100 103 111 171 MINUS (329) 101 102 112 172 EQUALITY_OP (330) 92 NOT_EQUAL (331) 93 LESS_THAN (332) 18 19 20 31 94 GREATER_THAN (333) 18 19 20 31 95 LESS_THAN_EQUAL (334) 96 GREATER_THAN_EQUAL (335) 97 Nonterminals, with rules where they appear Query (82) on left: 1 QueryTypes (83) on left: 2 3 4 5, on right: 1 DescribeQuery (84) on left: 6 7 8 9, on right: 4 ConstructQuery (85) on left: 10 11, on right: 3 Prolog (86) on left: 12 13 14 15, on right: 1 PrefixDeclList (87) on left: 16 17, on right: 12 14 17 PrefixDecl (88) on left: 18 19, on right: 16 17 BaseDecl (89) on left: 20, on right: 12 13 AskQuery (90) on left: 21 22, on right: 5 SelectQuery (91) on left: 23 24 25 26, on right: 2 VAR_REFERENCES (92) on left: 27 28, on right: 6 7 8 9 23 24 25 26 VariableReferenceList (93) on left: 29 30, on right: 27 30 IRIref (94) on left: 31 32, on right: 37 38 80 126 127 132 160 168 181 PrefixedName (95) on left: 33 34, on right: 32 DataSetClauseList (96) on left: 35 36, on right: 6 8 10 22 24 26 36 DataSetClause (97) 
on left: 37 38, on right: 35 36 WhereClause (98) on left: 39 40, on right: 6 9 10 11 21 22 23 24 25 26 SolutionModifier (99) on left: 41 42 43 44 45 46 47 48 49, on right: 6 7 8 9 10 11 23 24 25 26 OrderClause (100) on left: 50, on right: 42 43 44 45 47 OrderConditionList (101) on left: 51 52, on right: 50 52 OrderCondition (102) on left: 53 54 55 56 57 58, on right: 51 52 LimitClause (103) on left: 59, on right: 43 44 45 46 49 OffsetClause (104) on left: 60, on right: 44 45 46 47 48 GroupGraphPattern (105) on left: 61 62 63 64, on right: 39 40 75 76 77 78 79 80 81 82 GraphPatternList (106) on left: 65 66, on right: 62 64 66 GraphPattern (107) on left: 67 68 69 70 71 72 73 74, on right: 65 66 GraphPatternNotTriples (108) on left: 75 76 77 78 79 80, on right: 71 72 73 74 AlternativeGroupGraphPatterns (109) on left: 81 82, on right: 77 82 ConditionalOrExpression (110) on left: 83 84, on right: 53 54 57 114 115 116 117 119 120 121 122 124 125 128 129 130 139 ConditionalAndExpressionList (111) on left: 85 86, on right: 84 86 ConditionalAndExpression (112) on left: 87 88, on right: 83 84 85 86 ValueLogicalList (113) on left: 89 90, on right: 87 90 RelationalExpression (114) on left: 91 92 93 94 95 96 97, on right: 87 88 89 90 AdditiveExpression (115) on left: 98 99, on right: 91 92 93 94 95 96 97 MultiplicativeExpressionList (116) on left: 100 101 102 103, on right: 99 102 103 MultiplicativeExpression (117) on left: 104 105, on right: 98 99 100 101 102 103 UnaryExpressionList (118) on left: 106 107 108 109, on right: 105 108 109 UnaryExpression (119) on left: 110 111 112 113, on right: 104 105 106 107 108 109 BuiltInCall (120) on left: 114 115 116 117 118 119 120 121 122 123, on right: 56 131 140 RegexExpression (121) on left: 124 125, on right: 123 FunctionCall (122) on left: 126 127, on right: 55 133 141 ArgumentList (123) on left: 128 129, on right: 126 129 PrimaryExpression (124) on left: 130 131 132 133 134 135 136 137 138, on right: 110 111 112 113 Filter (125) on 
left: 139 140 141, on right: 67 68 69 70 Triples (126) on left: 142 143 144, on right: 10 11 62 63 67 68 71 72 142 143 TriplesSameSubject (127) on left: 145 146 147 148 149, on right: 142 144 PropertyList (128) on left: 150 151, on right: 147 153 163 PropertyListNotEmpty (129) on left: 152 153, on right: 145 146 147 148 150 ObjectList (130) on left: 154 155, on right: 152 153 155 GraphNode (131) on left: 156 157 158, on right: 154 155 165 166 Verb (132) on left: 159 160 161, on right: 152 153 TriplesNode (133) on left: 162 163, on right: 157 Collection (134) on left: 164, on right: 148 149 162 GraphNodeList (135) on left: 165 166, on right: 164 166 Var (136) on left: 167, on right: 29 30 58 78 118 138 145 156 159 GraphTerm (137) on left: 168 169 170 171 172 173 174 175, on right: 146 158 NumericLiteral (138) on left: 176 177 178, on right: 59 60 135 170 171 172 RDFLiteral (139) on left: 179 180 181, on right: 134 169 BooleanLiteral (140) on left: 182 183, on right: 136 173 String (141) on left: 184 185 186 187 188 189 190 191, on right: 179 180 181 BlankNode (142) on left: 192 193, on right: 79 137 174 state 0 PREFIX shift, and go to state 1 BASE shift, and go to state 2 $default reduce using rule 15 (Prolog) Query go to state 340 Prolog go to state 3 PrefixDeclList go to state 4 PrefixDecl go to state 5 BaseDecl go to state 6 state 1 PrefixDecl -> PREFIX . PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN (rule 18) PrefixDecl -> PREFIX . PNAME_NS LESS_THAN GREATER_THAN (rule 19) PNAME_NS shift, and go to state 7 state 2 BaseDecl -> BASE . LESS_THAN Q_IRI_CONTENT GREATER_THAN (rule 20) LESS_THAN shift, and go to state 8 state 3 Query -> Prolog . QueryTypes (rule 1) CONSTRUCT shift, and go to state 9 DESCRIBE shift, and go to state 10 SELECT shift, and go to state 11 ASK shift, and go to state 12 QueryTypes go to state 13 DescribeQuery go to state 14 ConstructQuery go to state 15 AskQuery go to state 16 SelectQuery go to state 17 state 4 Prolog -> PrefixDeclList . 
(rule 14) PrefixDeclList -> PrefixDeclList . PrefixDecl (rule 17) PREFIX shift, and go to state 1 $default reduce using rule 14 (Prolog) PrefixDecl go to state 18 state 5 PrefixDeclList -> PrefixDecl . (rule 16) $default reduce using rule 16 (PrefixDeclList) state 6 Prolog -> BaseDecl . PrefixDeclList (rule 12) Prolog -> BaseDecl . (rule 13) PREFIX shift, and go to state 1 $default reduce using rule 13 (Prolog) PrefixDeclList go to state 19 PrefixDecl go to state 5 state 7 PrefixDecl -> PREFIX PNAME_NS . LESS_THAN Q_IRI_CONTENT GREATER_THAN (rule 18) PrefixDecl -> PREFIX PNAME_NS . LESS_THAN GREATER_THAN (rule 19) LESS_THAN shift, and go to state 20 state 8 BaseDecl -> BASE LESS_THAN . Q_IRI_CONTENT GREATER_THAN (rule 20) Q_IRI_CONTENT shift, and go to state 21 state 9 ConstructQuery -> CONSTRUCT . LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier (rule 10) ConstructQuery -> CONSTRUCT . LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier (rule 11) LEFT_CURLY shift, and go to state 22 state 10 DescribeQuery -> DESCRIBE . VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier (rule 6) DescribeQuery -> DESCRIBE . VAR_REFERENCES SolutionModifier (rule 7) DescribeQuery -> DESCRIBE . VAR_REFERENCES DataSetClauseList SolutionModifier (rule 8) DescribeQuery -> DESCRIBE . VAR_REFERENCES WhereClause SolutionModifier (rule 9) VARNAME shift, and go to state 23 ASTERISK shift, and go to state 24 VAR_REFERENCES go to state 25 VariableReferenceList go to state 26 Var go to state 27 state 11 SelectQuery -> SELECT . VAR_REFERENCES WhereClause SolutionModifier (rule 23) SelectQuery -> SELECT . VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier (rule 24) SelectQuery -> SELECT . DISTINCT VAR_REFERENCES WhereClause SolutionModifier (rule 25) SelectQuery -> SELECT . 
DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier (rule 26) VARNAME shift, and go to state 23 ASTERISK shift, and go to state 24 DISTINCT shift, and go to state 28 VAR_REFERENCES go to state 29 VariableReferenceList go to state 26 Var go to state 27 state 12 AskQuery -> ASK . WhereClause (rule 21) AskQuery -> ASK . DataSetClauseList WhereClause (rule 22) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClauseList go to state 33 DataSetClause go to state 34 WhereClause go to state 35 GroupGraphPattern go to state 36 state 13 Query -> Prolog QueryTypes . (rule 1) $default reduce using rule 1 (Query) state 14 QueryTypes -> DescribeQuery . (rule 4) $default reduce using rule 4 (QueryTypes) state 15 QueryTypes -> ConstructQuery . (rule 3) $default reduce using rule 3 (QueryTypes) state 16 QueryTypes -> AskQuery . (rule 5) $default reduce using rule 5 (QueryTypes) state 17 QueryTypes -> SelectQuery . (rule 2) $default reduce using rule 2 (QueryTypes) state 18 PrefixDeclList -> PrefixDeclList PrefixDecl . (rule 17) $default reduce using rule 17 (PrefixDeclList) state 19 Prolog -> BaseDecl PrefixDeclList . (rule 12) PrefixDeclList -> PrefixDeclList . PrefixDecl (rule 17) PREFIX shift, and go to state 1 $default reduce using rule 12 (Prolog) PrefixDecl go to state 18 state 20 PrefixDecl -> PREFIX PNAME_NS LESS_THAN . Q_IRI_CONTENT GREATER_THAN (rule 18) PrefixDecl -> PREFIX PNAME_NS LESS_THAN . GREATER_THAN (rule 19) Q_IRI_CONTENT shift, and go to state 37 GREATER_THAN shift, and go to state 38 state 21 BaseDecl -> BASE LESS_THAN Q_IRI_CONTENT . GREATER_THAN (rule 20) GREATER_THAN shift, and go to state 39 state 22 ConstructQuery -> CONSTRUCT LEFT_CURLY . Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier (rule 10) ConstructQuery -> CONSTRUCT LEFT_CURLY . 
Triples RIGHT_CURLY WhereClause SolutionModifier (rule 11) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 Triples go to state 61 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 23 Var -> VARNAME . (rule 167) $default reduce using rule 167 (Var) state 24 VAR_REFERENCES -> ASTERISK . (rule 28) $default reduce using rule 28 (VAR_REFERENCES) state 25 DescribeQuery -> DESCRIBE VAR_REFERENCES . DataSetClauseList WhereClause SolutionModifier (rule 6) DescribeQuery -> DESCRIBE VAR_REFERENCES . SolutionModifier (rule 7) DescribeQuery -> DESCRIBE VAR_REFERENCES . DataSetClauseList SolutionModifier (rule 8) DescribeQuery -> DESCRIBE VAR_REFERENCES . 
WhereClause SolutionModifier (rule 9) FROM shift, and go to state 30 WHERE shift, and go to state 31 ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 LEFT_CURLY shift, and go to state 32 $default reduce using rule 41 (SolutionModifier) DataSetClauseList go to state 74 DataSetClause go to state 34 WhereClause go to state 75 SolutionModifier go to state 76 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 GroupGraphPattern go to state 36 state 26 VAR_REFERENCES -> VariableReferenceList . (rule 27) VariableReferenceList -> VariableReferenceList . Var (rule 30) VARNAME shift, and go to state 23 $default reduce using rule 27 (VAR_REFERENCES) Var go to state 80 state 27 VariableReferenceList -> Var . (rule 29) $default reduce using rule 29 (VariableReferenceList) state 28 SelectQuery -> SELECT DISTINCT . VAR_REFERENCES WhereClause SolutionModifier (rule 25) SelectQuery -> SELECT DISTINCT . VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier (rule 26) VARNAME shift, and go to state 23 ASTERISK shift, and go to state 24 VAR_REFERENCES go to state 81 VariableReferenceList go to state 26 Var go to state 27 state 29 SelectQuery -> SELECT VAR_REFERENCES . WhereClause SolutionModifier (rule 23) SelectQuery -> SELECT VAR_REFERENCES . DataSetClauseList WhereClause SolutionModifier (rule 24) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClauseList go to state 82 DataSetClause go to state 34 WhereClause go to state 83 GroupGraphPattern go to state 36 state 30 DataSetClause -> FROM . IRIref (rule 37) DataSetClause -> FROM . NAMED IRIref (rule 38) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 NAMED shift, and go to state 84 LESS_THAN shift, and go to state 58 IRIref go to state 85 PrefixedName go to state 60 state 31 WhereClause -> WHERE . 
GroupGraphPattern (rule 39) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 86 state 32 GroupGraphPattern -> LEFT_CURLY . RIGHT_CURLY (rule 61) GroupGraphPattern -> LEFT_CURLY . Triples GraphPatternList RIGHT_CURLY (rule 62) GroupGraphPattern -> LEFT_CURLY . Triples RIGHT_CURLY (rule 63) GroupGraphPattern -> LEFT_CURLY . GraphPatternList RIGHT_CURLY (rule 64) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 OPTIONAL shift, and go to state 87 FILTER shift, and go to state 88 GRAPH shift, and go to state 89 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 LEFT_CURLY shift, and go to state 32 RIGHT_CURLY shift, and go to state 90 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 GroupGraphPattern go to state 91 GraphPatternList go to state 92 GraphPattern go to state 93 GraphPatternNotTriples go to state 94 Filter go to state 95 Triples go to state 96 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 33 AskQuery -> ASK DataSetClauseList . WhereClause (rule 22) DataSetClauseList -> DataSetClauseList . 
DataSetClause (rule 36) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClause go to state 97 WhereClause go to state 98 GroupGraphPattern go to state 36 state 34 DataSetClauseList -> DataSetClause . (rule 35) $default reduce using rule 35 (DataSetClauseList) state 35 AskQuery -> ASK WhereClause . (rule 21) $default reduce using rule 21 (AskQuery) state 36 WhereClause -> GroupGraphPattern . (rule 40) $default reduce using rule 40 (WhereClause) state 37 PrefixDecl -> PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT . GREATER_THAN (rule 18) GREATER_THAN shift, and go to state 99 state 38 PrefixDecl -> PREFIX PNAME_NS LESS_THAN GREATER_THAN . (rule 19) $default reduce using rule 19 (PrefixDecl) state 39 BaseDecl -> BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN . (rule 20) $default reduce using rule 20 (BaseDecl) state 40 PrefixedName -> PNAME_NS . (rule 33) $default reduce using rule 33 (PrefixedName) state 41 PrefixedName -> PNAME_LN . (rule 34) $default reduce using rule 34 (PrefixedName) state 42 BlankNode -> BLANK_NODE_LABEL . (rule 193) $default reduce using rule 193 (BlankNode) state 43 BooleanLiteral -> TRUE . (rule 182) $default reduce using rule 182 (BooleanLiteral) state 44 BooleanLiteral -> FALSE . (rule 183) $default reduce using rule 183 (BooleanLiteral) state 45 Collection -> LEFT_PAREN . 
GraphNodeList RIGHT_PAREN (rule 164) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 100 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 GraphNode go to state 101 TriplesNode go to state 102 Collection go to state 103 GraphNodeList go to state 104 Var go to state 105 GraphTerm go to state 106 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 46 TriplesSameSubject -> LEFT_SQUARE . PropertyListNotEmpty RIGHT_SQUARE PropertyList (rule 147) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 IRIref go to state 108 PrefixedName go to state 60 PropertyListNotEmpty go to state 109 Verb go to state 110 Var go to state 111 state 47 NumericLiteral -> INTEGER . (rule 176) $default reduce using rule 176 (NumericLiteral) state 48 NumericLiteral -> DECIMAL . (rule 177) $default reduce using rule 177 (NumericLiteral) state 49 NumericLiteral -> DOUBLE . (rule 178) $default reduce using rule 178 (NumericLiteral) state 50 String -> STRING_LITERAL_DELIMETER_1 . STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 (rule 184) String -> STRING_LITERAL_DELIMETER_1 . 
STRING_LITERAL_DELIMETER_1 (rule 188) STRING_LITERAL_DELIMETER_1 shift, and go to state 112 STRING_LITERAL1 shift, and go to state 113 state 51 String -> STRING_LITERAL_DELIMETER_2 . STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 (rule 186) String -> STRING_LITERAL_DELIMETER_2 . STRING_LITERAL_DELIMETER_2 (rule 190) STRING_LITERAL_DELIMETER_2 shift, and go to state 114 STRING_LITERAL_LONG1 shift, and go to state 115 state 52 String -> STRING_LITERAL_DELIMETER_3 . STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 (rule 185) String -> STRING_LITERAL_DELIMETER_3 . STRING_LITERAL_DELIMETER_3 (rule 189) STRING_LITERAL_DELIMETER_3 shift, and go to state 116 STRING_LITERAL2 shift, and go to state 117 state 53 String -> STRING_LITERAL_DELIMETER_4 . STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 (rule 187) String -> STRING_LITERAL_DELIMETER_4 . STRING_LITERAL_DELIMETER_4 (rule 191) STRING_LITERAL_DELIMETER_4 shift, and go to state 118 STRING_LITERAL_LONG2 shift, and go to state 119 state 54 GraphTerm -> NIL . (rule 175) $default reduce using rule 175 (GraphTerm) state 55 BlankNode -> ANON . (rule 192) $default reduce using rule 192 (BlankNode) state 56 GraphTerm -> PLUS . NumericLiteral (rule 171) INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 NumericLiteral go to state 120 state 57 GraphTerm -> MINUS . NumericLiteral (rule 172) INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 NumericLiteral go to state 121 state 58 IRIref -> LESS_THAN . Q_IRI_CONTENT GREATER_THAN (rule 31) Q_IRI_CONTENT shift, and go to state 122 state 59 GraphTerm -> IRIref . (rule 168) $default reduce using rule 168 (GraphTerm) state 60 IRIref -> PrefixedName . (rule 32) $default reduce using rule 32 (IRIref) state 61 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples . RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier (rule 10) ConstructQuery -> CONSTRUCT LEFT_CURLY Triples . 
RIGHT_CURLY WhereClause SolutionModifier (rule 11) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 RIGHT_CURLY shift, and go to state 124 state 62 Triples -> TriplesSameSubject . (rule 144) $default reduce using rule 144 (Triples) state 63 TriplesSameSubject -> Collection . PropertyListNotEmpty (rule 148) TriplesSameSubject -> Collection . (rule 149) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 $default reduce using rule 149 (TriplesSameSubject) IRIref go to state 108 PrefixedName go to state 60 PropertyListNotEmpty go to state 125 Verb go to state 110 Var go to state 111 state 64 TriplesSameSubject -> Var . PropertyListNotEmpty (rule 145) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 IRIref go to state 108 PrefixedName go to state 60 PropertyListNotEmpty go to state 126 Verb go to state 110 Var go to state 111 state 65 TriplesSameSubject -> GraphTerm . PropertyListNotEmpty (rule 146) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 IRIref go to state 108 PrefixedName go to state 60 PropertyListNotEmpty go to state 127 Verb go to state 110 Var go to state 111 state 66 GraphTerm -> NumericLiteral . (rule 170) $default reduce using rule 170 (GraphTerm) state 67 GraphTerm -> RDFLiteral . (rule 169) $default reduce using rule 169 (GraphTerm) state 68 GraphTerm -> BooleanLiteral . (rule 173) $default reduce using rule 173 (GraphTerm) state 69 RDFLiteral -> String . (rule 179) RDFLiteral -> String . LANGTAG (rule 180) RDFLiteral -> String . 
DOUBLE_HAT IRIref (rule 181) DOUBLE_HAT shift, and go to state 128 LANGTAG shift, and go to state 129 $default reduce using rule 179 (RDFLiteral) state 70 GraphTerm -> BlankNode . (rule 174) $default reduce using rule 174 (GraphTerm) state 71 OrderClause -> ORDER . BY OrderConditionList (rule 50) BY shift, and go to state 130 state 72 LimitClause -> LIMIT . NumericLiteral (rule 59) INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 NumericLiteral go to state 131 state 73 OffsetClause -> OFFSET . NumericLiteral (rule 60) INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 NumericLiteral go to state 132 state 74 DescribeQuery -> DESCRIBE VAR_REFERENCES DataSetClauseList . WhereClause SolutionModifier (rule 6) DescribeQuery -> DESCRIBE VAR_REFERENCES DataSetClauseList . SolutionModifier (rule 8) DataSetClauseList -> DataSetClauseList . DataSetClause (rule 36) FROM shift, and go to state 30 WHERE shift, and go to state 31 ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 LEFT_CURLY shift, and go to state 32 $default reduce using rule 41 (SolutionModifier) DataSetClause go to state 97 WhereClause go to state 133 SolutionModifier go to state 134 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 GroupGraphPattern go to state 36 state 75 DescribeQuery -> DESCRIBE VAR_REFERENCES WhereClause . SolutionModifier (rule 9) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 135 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 76 DescribeQuery -> DESCRIBE VAR_REFERENCES SolutionModifier . (rule 7) $default reduce using rule 7 (DescribeQuery) state 77 SolutionModifier -> OrderClause . (rule 42) SolutionModifier -> OrderClause . 
LimitClause (rule 43) SolutionModifier -> OrderClause . LimitClause OffsetClause (rule 44) SolutionModifier -> OrderClause . OffsetClause LimitClause (rule 45) SolutionModifier -> OrderClause . OffsetClause (rule 47) LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 42 (SolutionModifier) LimitClause go to state 136 OffsetClause go to state 137 state 78 SolutionModifier -> LimitClause . OffsetClause (rule 46) SolutionModifier -> LimitClause . (rule 49) OFFSET shift, and go to state 73 $default reduce using rule 49 (SolutionModifier) OffsetClause go to state 138 state 79 SolutionModifier -> OffsetClause . (rule 48) $default reduce using rule 48 (SolutionModifier) state 80 VariableReferenceList -> VariableReferenceList Var . (rule 30) $default reduce using rule 30 (VariableReferenceList) state 81 SelectQuery -> SELECT DISTINCT VAR_REFERENCES . WhereClause SolutionModifier (rule 25) SelectQuery -> SELECT DISTINCT VAR_REFERENCES . DataSetClauseList WhereClause SolutionModifier (rule 26) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClauseList go to state 139 DataSetClause go to state 34 WhereClause go to state 140 GroupGraphPattern go to state 36 state 82 SelectQuery -> SELECT VAR_REFERENCES DataSetClauseList . WhereClause SolutionModifier (rule 24) DataSetClauseList -> DataSetClauseList . DataSetClause (rule 36) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClause go to state 97 WhereClause go to state 141 GroupGraphPattern go to state 36 state 83 SelectQuery -> SELECT VAR_REFERENCES WhereClause . 
SolutionModifier (rule 23) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 142 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 84 DataSetClause -> FROM NAMED . IRIref (rule 38) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 LESS_THAN shift, and go to state 58 IRIref go to state 143 PrefixedName go to state 60 state 85 DataSetClause -> FROM IRIref . (rule 37) $default reduce using rule 37 (DataSetClause) state 86 WhereClause -> WHERE GroupGraphPattern . (rule 39) $default reduce using rule 39 (WhereClause) state 87 GraphPatternNotTriples -> OPTIONAL . GroupGraphPattern (rule 75) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 144 state 88 Filter -> FILTER . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 139) Filter -> FILTER . BuiltInCall (rule 140) Filter -> FILTER . FunctionCall (rule 141) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 LEFT_PAREN shift, and go to state 155 LESS_THAN shift, and go to state 58 IRIref go to state 156 PrefixedName go to state 60 BuiltInCall go to state 157 RegexExpression go to state 158 FunctionCall go to state 159 state 89 GraphPatternNotTriples -> GRAPH . Var GroupGraphPattern (rule 78) GraphPatternNotTriples -> GRAPH . BlankNode GroupGraphPattern (rule 79) GraphPatternNotTriples -> GRAPH . 
IRIref GroupGraphPattern (rule 80) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 ANON shift, and go to state 55 LESS_THAN shift, and go to state 58 IRIref go to state 160 PrefixedName go to state 60 Var go to state 161 BlankNode go to state 162 state 90 GroupGraphPattern -> LEFT_CURLY RIGHT_CURLY . (rule 61) $default reduce using rule 61 (GroupGraphPattern) state 91 GraphPatternNotTriples -> GroupGraphPattern . (rule 76) GraphPatternNotTriples -> GroupGraphPattern . AlternativeGroupGraphPatterns (rule 77) UNION shift, and go to state 163 $default reduce using rule 76 (GraphPatternNotTriples) AlternativeGroupGraphPatterns go to state 164 state 92 GroupGraphPattern -> LEFT_CURLY GraphPatternList . RIGHT_CURLY (rule 64) GraphPatternList -> GraphPatternList . GraphPattern (rule 66) OPTIONAL shift, and go to state 87 FILTER shift, and go to state 88 GRAPH shift, and go to state 89 LEFT_CURLY shift, and go to state 32 RIGHT_CURLY shift, and go to state 165 GroupGraphPattern go to state 91 GraphPattern go to state 166 GraphPatternNotTriples go to state 94 Filter go to state 95 state 93 GraphPatternList -> GraphPattern . (rule 65) $default reduce using rule 65 (GraphPatternList) state 94 GraphPattern -> GraphPatternNotTriples . Triples (rule 71) GraphPattern -> GraphPatternNotTriples . DOT Triples (rule 72) GraphPattern -> GraphPatternNotTriples . (rule 73) GraphPattern -> GraphPatternNotTriples . 
DOT (rule 74) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 DOT shift, and go to state 167 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 $default reduce using rule 73 (GraphPattern) IRIref go to state 59 PrefixedName go to state 60 Triples go to state 168 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 95 GraphPattern -> Filter . Triples (rule 67) GraphPattern -> Filter . DOT Triples (rule 68) GraphPattern -> Filter . DOT (rule 69) GraphPattern -> Filter . 
(rule 70) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 DOT shift, and go to state 169 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 $default reduce using rule 70 (GraphPattern) IRIref go to state 59 PrefixedName go to state 60 Triples go to state 170 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 96 GroupGraphPattern -> LEFT_CURLY Triples . GraphPatternList RIGHT_CURLY (rule 62) GroupGraphPattern -> LEFT_CURLY Triples . RIGHT_CURLY (rule 63) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 OPTIONAL shift, and go to state 87 FILTER shift, and go to state 88 GRAPH shift, and go to state 89 LEFT_CURLY shift, and go to state 32 RIGHT_CURLY shift, and go to state 171 GroupGraphPattern go to state 91 GraphPatternList go to state 172 GraphPattern go to state 93 GraphPatternNotTriples go to state 94 Filter go to state 95 state 97 DataSetClauseList -> DataSetClauseList DataSetClause . (rule 36) $default reduce using rule 36 (DataSetClauseList) state 98 AskQuery -> ASK DataSetClauseList WhereClause . 
(rule 22) $default reduce using rule 22 (AskQuery) state 99 PrefixDecl -> PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN . (rule 18) $default reduce using rule 18 (PrefixDecl) state 100 TriplesNode -> LEFT_SQUARE . PropertyList RIGHT_SQUARE (rule 163) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 $default reduce using rule 151 (PropertyList) IRIref go to state 108 PrefixedName go to state 60 PropertyList go to state 173 PropertyListNotEmpty go to state 174 Verb go to state 110 Var go to state 111 state 101 GraphNodeList -> GraphNode . (rule 165) $default reduce using rule 165 (GraphNodeList) state 102 GraphNode -> TriplesNode . (rule 157) $default reduce using rule 157 (GraphNode) state 103 TriplesNode -> Collection . (rule 162) $default reduce using rule 162 (TriplesNode) state 104 Collection -> LEFT_PAREN GraphNodeList . RIGHT_PAREN (rule 164) GraphNodeList -> GraphNodeList . 
GraphNode (rule 166) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 RIGHT_PAREN shift, and go to state 175 LEFT_SQUARE shift, and go to state 100 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 GraphNode go to state 176 TriplesNode go to state 102 Collection go to state 103 Var go to state 105 GraphTerm go to state 106 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 105 GraphNode -> Var . (rule 156) $default reduce using rule 156 (GraphNode) state 106 GraphNode -> GraphTerm . (rule 158) $default reduce using rule 158 (GraphNode) state 107 Verb -> A . (rule 161) $default reduce using rule 161 (Verb) state 108 Verb -> IRIref . (rule 160) $default reduce using rule 160 (Verb) state 109 TriplesSameSubject -> LEFT_SQUARE PropertyListNotEmpty . RIGHT_SQUARE PropertyList (rule 147) RIGHT_SQUARE shift, and go to state 177 state 110 PropertyListNotEmpty -> Verb . ObjectList (rule 152) PropertyListNotEmpty -> Verb . 
ObjectList SEMICOLON PropertyList (rule 153) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 100 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 ObjectList go to state 178 GraphNode go to state 179 TriplesNode go to state 102 Collection go to state 103 Var go to state 105 GraphTerm go to state 106 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 111 Verb -> Var . (rule 159) $default reduce using rule 159 (Verb) state 112 String -> STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 . (rule 188) $default reduce using rule 188 (String) state 113 String -> STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 . STRING_LITERAL_DELIMETER_1 (rule 184) STRING_LITERAL_DELIMETER_1 shift, and go to state 180 state 114 String -> STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 . (rule 190) $default reduce using rule 190 (String) state 115 String -> STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 . STRING_LITERAL_DELIMETER_2 (rule 186) STRING_LITERAL_DELIMETER_2 shift, and go to state 181 state 116 String -> STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 . (rule 189) $default reduce using rule 189 (String) state 117 String -> STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 . 
STRING_LITERAL_DELIMETER_3 (rule 185) STRING_LITERAL_DELIMETER_3 shift, and go to state 182 state 118 String -> STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 . (rule 191) $default reduce using rule 191 (String) state 119 String -> STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 . STRING_LITERAL_DELIMETER_4 (rule 187) STRING_LITERAL_DELIMETER_4 shift, and go to state 183 state 120 GraphTerm -> PLUS NumericLiteral . (rule 171) $default reduce using rule 171 (GraphTerm) state 121 GraphTerm -> MINUS NumericLiteral . (rule 172) $default reduce using rule 172 (GraphTerm) state 122 IRIref -> LESS_THAN Q_IRI_CONTENT . GREATER_THAN (rule 31) GREATER_THAN shift, and go to state 184 state 123 Triples -> Triples DOT . TriplesSameSubject (rule 142) Triples -> Triples DOT . (rule 143) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 $default reduce using rule 143 (Triples) IRIref go to state 59 PrefixedName go to state 60 TriplesSameSubject go to state 185 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 124 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY . 
DataSetClauseList WhereClause SolutionModifier (rule 10) ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY . WhereClause SolutionModifier (rule 11) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClauseList go to state 186 DataSetClause go to state 34 WhereClause go to state 187 GroupGraphPattern go to state 36 state 125 TriplesSameSubject -> Collection PropertyListNotEmpty . (rule 148) $default reduce using rule 148 (TriplesSameSubject) state 126 TriplesSameSubject -> Var PropertyListNotEmpty . (rule 145) $default reduce using rule 145 (TriplesSameSubject) state 127 TriplesSameSubject -> GraphTerm PropertyListNotEmpty . (rule 146) $default reduce using rule 146 (TriplesSameSubject) state 128 RDFLiteral -> String DOUBLE_HAT . IRIref (rule 181) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 LESS_THAN shift, and go to state 58 IRIref go to state 188 PrefixedName go to state 60 state 129 RDFLiteral -> String LANGTAG . (rule 180) $default reduce using rule 180 (RDFLiteral) state 130 OrderClause -> ORDER BY . OrderConditionList (rule 50) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 ASC shift, and go to state 189 DESC shift, and go to state 190 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 LEFT_PAREN shift, and go to state 191 LESS_THAN shift, and go to state 58 IRIref go to state 156 PrefixedName go to state 60 OrderConditionList go to state 192 OrderCondition go to state 193 BuiltInCall go to state 194 RegexExpression go to state 158 FunctionCall go to state 195 Var go to state 196 state 131 LimitClause -> LIMIT NumericLiteral . 
(rule 59) $default reduce using rule 59 (LimitClause) state 132 OffsetClause -> OFFSET NumericLiteral . (rule 60) $default reduce using rule 60 (OffsetClause) state 133 DescribeQuery -> DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause . SolutionModifier (rule 6) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 197 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 134 DescribeQuery -> DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier . (rule 8) $default reduce using rule 8 (DescribeQuery) state 135 DescribeQuery -> DESCRIBE VAR_REFERENCES WhereClause SolutionModifier . (rule 9) $default reduce using rule 9 (DescribeQuery) state 136 SolutionModifier -> OrderClause LimitClause . (rule 43) SolutionModifier -> OrderClause LimitClause . OffsetClause (rule 44) OFFSET shift, and go to state 73 $default reduce using rule 43 (SolutionModifier) OffsetClause go to state 198 state 137 SolutionModifier -> OrderClause OffsetClause . LimitClause (rule 45) SolutionModifier -> OrderClause OffsetClause . (rule 47) LIMIT shift, and go to state 72 $default reduce using rule 47 (SolutionModifier) LimitClause go to state 199 state 138 SolutionModifier -> LimitClause OffsetClause . (rule 46) $default reduce using rule 46 (SolutionModifier) state 139 SelectQuery -> SELECT DISTINCT VAR_REFERENCES DataSetClauseList . WhereClause SolutionModifier (rule 26) DataSetClauseList -> DataSetClauseList . DataSetClause (rule 36) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClause go to state 97 WhereClause go to state 200 GroupGraphPattern go to state 36 state 140 SelectQuery -> SELECT DISTINCT VAR_REFERENCES WhereClause . 
SolutionModifier (rule 25) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 201 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 141 SelectQuery -> SELECT VAR_REFERENCES DataSetClauseList WhereClause . SolutionModifier (rule 24) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 202 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 142 SelectQuery -> SELECT VAR_REFERENCES WhereClause SolutionModifier . (rule 23) $default reduce using rule 23 (SelectQuery) state 143 DataSetClause -> FROM NAMED IRIref . (rule 38) $default reduce using rule 38 (DataSetClause) state 144 GraphPatternNotTriples -> OPTIONAL GroupGraphPattern . (rule 75) $default reduce using rule 75 (GraphPatternNotTriples) state 145 BuiltInCall -> STR . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 114) LEFT_PAREN shift, and go to state 203 state 146 BuiltInCall -> LANG . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 115) LEFT_PAREN shift, and go to state 204 state 147 BuiltInCall -> LANGMATCHES . LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 116) LEFT_PAREN shift, and go to state 205 state 148 BuiltInCall -> DATATYPE . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 117) LEFT_PAREN shift, and go to state 206 state 149 BuiltInCall -> isIRI . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 119) LEFT_PAREN shift, and go to state 207 state 150 BuiltInCall -> isURI . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 120) LEFT_PAREN shift, and go to state 208 state 151 BuiltInCall -> isLITERAL . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 122) LEFT_PAREN shift, and go to state 209 state 152 BuiltInCall -> isBLANK . 
LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 121) LEFT_PAREN shift, and go to state 210 state 153 BuiltInCall -> BOUND . LEFT_PAREN Var RIGHT_PAREN (rule 118) LEFT_PAREN shift, and go to state 211 state 154 RegexExpression -> REGEX . LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 124) RegexExpression -> REGEX . LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 125) LEFT_PAREN shift, and go to state 212 state 155 Filter -> FILTER LEFT_PAREN . ConditionalOrExpression RIGHT_PAREN (rule 139) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 218 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 
158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 156 FunctionCall -> IRIref . LEFT_PAREN ArgumentList RIGHT_PAREN (rule 126) FunctionCall -> IRIref . NIL (rule 127) LEFT_PAREN shift, and go to state 232 NIL shift, and go to state 233 state 157 Filter -> FILTER BuiltInCall . (rule 140) $default reduce using rule 140 (Filter) state 158 BuiltInCall -> RegexExpression . (rule 123) $default reduce using rule 123 (BuiltInCall) state 159 Filter -> FILTER FunctionCall . (rule 141) $default reduce using rule 141 (Filter) state 160 GraphPatternNotTriples -> GRAPH IRIref . GroupGraphPattern (rule 80) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 234 state 161 GraphPatternNotTriples -> GRAPH Var . GroupGraphPattern (rule 78) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 235 state 162 GraphPatternNotTriples -> GRAPH BlankNode . GroupGraphPattern (rule 79) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 236 state 163 AlternativeGroupGraphPatterns -> UNION . GroupGraphPattern (rule 81) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 237 state 164 GraphPatternNotTriples -> GroupGraphPattern AlternativeGroupGraphPatterns . (rule 77) AlternativeGroupGraphPatterns -> AlternativeGroupGraphPatterns . UNION GroupGraphPattern (rule 82) UNION shift, and go to state 238 $default reduce using rule 77 (GraphPatternNotTriples) state 165 GroupGraphPattern -> LEFT_CURLY GraphPatternList RIGHT_CURLY . (rule 64) $default reduce using rule 64 (GroupGraphPattern) state 166 GraphPatternList -> GraphPatternList GraphPattern . (rule 66) $default reduce using rule 66 (GraphPatternList) state 167 GraphPattern -> GraphPatternNotTriples DOT . Triples (rule 72) GraphPattern -> GraphPatternNotTriples DOT . 
(rule 74) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 $default reduce using rule 74 (GraphPattern) IRIref go to state 59 PrefixedName go to state 60 Triples go to state 239 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 168 GraphPattern -> GraphPatternNotTriples Triples . (rule 71) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 $default reduce using rule 71 (GraphPattern) state 169 GraphPattern -> Filter DOT . Triples (rule 68) GraphPattern -> Filter DOT . 
(rule 69) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 46 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 $default reduce using rule 69 (GraphPattern) IRIref go to state 59 PrefixedName go to state 60 Triples go to state 240 TriplesSameSubject go to state 62 Collection go to state 63 Var go to state 64 GraphTerm go to state 65 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 170 GraphPattern -> Filter Triples . (rule 67) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 $default reduce using rule 67 (GraphPattern) state 171 GroupGraphPattern -> LEFT_CURLY Triples RIGHT_CURLY . (rule 63) $default reduce using rule 63 (GroupGraphPattern) state 172 GroupGraphPattern -> LEFT_CURLY Triples GraphPatternList . RIGHT_CURLY (rule 62) GraphPatternList -> GraphPatternList . GraphPattern (rule 66) OPTIONAL shift, and go to state 87 FILTER shift, and go to state 88 GRAPH shift, and go to state 89 LEFT_CURLY shift, and go to state 32 RIGHT_CURLY shift, and go to state 241 GroupGraphPattern go to state 91 GraphPattern go to state 166 GraphPatternNotTriples go to state 94 Filter go to state 95 state 173 TriplesNode -> LEFT_SQUARE PropertyList . 
RIGHT_SQUARE (rule 163) RIGHT_SQUARE shift, and go to state 242 state 174 PropertyList -> PropertyListNotEmpty . (rule 150) $default reduce using rule 150 (PropertyList) state 175 Collection -> LEFT_PAREN GraphNodeList RIGHT_PAREN . (rule 164) $default reduce using rule 164 (Collection) state 176 GraphNodeList -> GraphNodeList GraphNode . (rule 166) $default reduce using rule 166 (GraphNodeList) state 177 TriplesSameSubject -> LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE . PropertyList (rule 147) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 $default reduce using rule 151 (PropertyList) IRIref go to state 108 PrefixedName go to state 60 PropertyList go to state 243 PropertyListNotEmpty go to state 174 Verb go to state 110 Var go to state 111 state 178 PropertyListNotEmpty -> Verb ObjectList . (rule 152) PropertyListNotEmpty -> Verb ObjectList . SEMICOLON PropertyList (rule 153) ObjectList -> ObjectList . COMMA GraphNode (rule 155) COMMA shift, and go to state 244 SEMICOLON shift, and go to state 245 $default reduce using rule 152 (PropertyListNotEmpty) state 179 ObjectList -> GraphNode . (rule 154) $default reduce using rule 154 (ObjectList) state 180 String -> STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 . (rule 184) $default reduce using rule 184 (String) state 181 String -> STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 . (rule 186) $default reduce using rule 186 (String) state 182 String -> STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 STRING_LITERAL_DELIMETER_3 . (rule 185) $default reduce using rule 185 (String) state 183 String -> STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 . (rule 187) $default reduce using rule 187 (String) state 184 IRIref -> LESS_THAN Q_IRI_CONTENT GREATER_THAN . 
(rule 31) $default reduce using rule 31 (IRIref) state 185 Triples -> Triples DOT TriplesSameSubject . (rule 142) $default reduce using rule 142 (Triples) state 186 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList . WhereClause SolutionModifier (rule 10) DataSetClauseList -> DataSetClauseList . DataSetClause (rule 36) FROM shift, and go to state 30 WHERE shift, and go to state 31 LEFT_CURLY shift, and go to state 32 DataSetClause go to state 97 WhereClause go to state 246 GroupGraphPattern go to state 36 state 187 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause . SolutionModifier (rule 11) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 247 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 188 RDFLiteral -> String DOUBLE_HAT IRIref . (rule 181) $default reduce using rule 181 (RDFLiteral) state 189 OrderCondition -> ASC . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 53) LEFT_PAREN shift, and go to state 248 state 190 OrderCondition -> DESC . LEFT_PAREN ConditionalOrExpression RIGHT_PAREN (rule 54) LEFT_PAREN shift, and go to state 249 state 191 OrderCondition -> LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 57) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 250 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 192 OrderClause -> ORDER BY OrderConditionList . (rule 50) OrderConditionList -> OrderConditionList . 
OrderCondition (rule 52) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 ASC shift, and go to state 189 DESC shift, and go to state 190 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 LEFT_PAREN shift, and go to state 191 LESS_THAN shift, and go to state 58 $default reduce using rule 50 (OrderClause) IRIref go to state 156 PrefixedName go to state 60 OrderCondition go to state 251 BuiltInCall go to state 194 RegexExpression go to state 158 FunctionCall go to state 195 Var go to state 196 state 193 OrderConditionList -> OrderCondition . (rule 51) $default reduce using rule 51 (OrderConditionList) state 194 OrderCondition -> BuiltInCall . (rule 56) $default reduce using rule 56 (OrderCondition) state 195 OrderCondition -> FunctionCall . (rule 55) $default reduce using rule 55 (OrderCondition) state 196 OrderCondition -> Var . (rule 58) $default reduce using rule 58 (OrderCondition) state 197 DescribeQuery -> DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier . (rule 6) $default reduce using rule 6 (DescribeQuery) state 198 SolutionModifier -> OrderClause LimitClause OffsetClause . (rule 44) $default reduce using rule 44 (SolutionModifier) state 199 SolutionModifier -> OrderClause OffsetClause LimitClause . (rule 45) $default reduce using rule 45 (SolutionModifier) state 200 SelectQuery -> SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause . 
SolutionModifier (rule 26) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 252 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 201 SelectQuery -> SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier . (rule 25) $default reduce using rule 25 (SelectQuery) state 202 SelectQuery -> SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier . (rule 24) $default reduce using rule 24 (SelectQuery) state 203 BuiltInCall -> STR LEFT_PAREN . ConditionalOrExpression RIGHT_PAREN (rule 114) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 253 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go 
to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 204 BuiltInCall -> LANG LEFT_PAREN . ConditionalOrExpression RIGHT_PAREN (rule 115) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 254 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to 
state 231 state 205 BuiltInCall -> LANGMATCHES LEFT_PAREN . ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 116) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 255 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 206 BuiltInCall -> DATATYPE LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 117) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 256 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 207 BuiltInCall -> isIRI LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 119) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 257 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 208 BuiltInCall -> isURI LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 120) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 258 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 209 BuiltInCall -> isLITERAL LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 122) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 259 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 210 BuiltInCall -> isBLANK LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 121) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 260 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 211 BuiltInCall -> BOUND LEFT_PAREN . Var RIGHT_PAREN (rule 118) VARNAME shift, and go to state 23 Var go to state 261 state 212 RegexExpression -> REGEX LEFT_PAREN . ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 124) RegexExpression -> REGEX LEFT_PAREN . 
ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 125) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 262 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 213 UnaryExpression -> BANG . 
PrimaryExpression (rule 110) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 263 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 214 PrimaryExpression -> LEFT_PAREN . 
ConditionalOrExpression RIGHT_PAREN (rule 130) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 264 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 215 UnaryExpression -> PLUS . 
PrimaryExpression (rule 111) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 265 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 216 UnaryExpression -> MINUS . 
PrimaryExpression (rule 112) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 266 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 217 FunctionCall -> IRIref . LEFT_PAREN ArgumentList RIGHT_PAREN (rule 126) FunctionCall -> IRIref . NIL (rule 127) PrimaryExpression -> IRIref . (rule 132) LEFT_PAREN shift, and go to state 232 NIL shift, and go to state 233 $default reduce using rule 132 (PrimaryExpression) state 218 Filter -> FILTER LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 139) RIGHT_PAREN shift, and go to state 267 state 219 ConditionalOrExpression -> ConditionalAndExpression . (rule 83) ConditionalOrExpression -> ConditionalAndExpression . 
ConditionalAndExpressionList (rule 84) DOUBLE_PIPE shift, and go to state 268 $default reduce using rule 83 (ConditionalOrExpression) ConditionalAndExpressionList go to state 269 state 220 ConditionalAndExpression -> RelationalExpression . ValueLogicalList (rule 87) ConditionalAndExpression -> RelationalExpression . (rule 88) DOUBLE_AMPERSAND shift, and go to state 270 $default reduce using rule 88 (ConditionalAndExpression) ValueLogicalList go to state 271 state 221 RelationalExpression -> AdditiveExpression . (rule 91) RelationalExpression -> AdditiveExpression . EQUALITY_OP AdditiveExpression (rule 92) RelationalExpression -> AdditiveExpression . NOT_EQUAL AdditiveExpression (rule 93) RelationalExpression -> AdditiveExpression . LESS_THAN AdditiveExpression (rule 94) RelationalExpression -> AdditiveExpression . GREATER_THAN AdditiveExpression (rule 95) RelationalExpression -> AdditiveExpression . LESS_THAN_EQUAL AdditiveExpression (rule 96) RelationalExpression -> AdditiveExpression . GREATER_THAN_EQUAL AdditiveExpression (rule 97) EQUALITY_OP shift, and go to state 272 NOT_EQUAL shift, and go to state 273 LESS_THAN shift, and go to state 274 GREATER_THAN shift, and go to state 275 LESS_THAN_EQUAL shift, and go to state 276 GREATER_THAN_EQUAL shift, and go to state 277 $default reduce using rule 91 (RelationalExpression) state 222 AdditiveExpression -> MultiplicativeExpression . (rule 98) AdditiveExpression -> MultiplicativeExpression . MultiplicativeExpressionList (rule 99) PLUS shift, and go to state 278 MINUS shift, and go to state 279 $default reduce using rule 98 (AdditiveExpression) MultiplicativeExpressionList go to state 280 state 223 MultiplicativeExpression -> UnaryExpression . (rule 104) MultiplicativeExpression -> UnaryExpression . 
UnaryExpressionList (rule 105) ASTERISK shift, and go to state 281 FORWARDSLASH shift, and go to state 282 $default reduce using rule 104 (MultiplicativeExpression) UnaryExpressionList go to state 283 state 224 PrimaryExpression -> BuiltInCall . (rule 131) $default reduce using rule 131 (PrimaryExpression) state 225 PrimaryExpression -> FunctionCall . (rule 133) $default reduce using rule 133 (PrimaryExpression) state 226 UnaryExpression -> PrimaryExpression . (rule 113) $default reduce using rule 113 (UnaryExpression) state 227 PrimaryExpression -> Var . (rule 138) $default reduce using rule 138 (PrimaryExpression) state 228 PrimaryExpression -> NumericLiteral . (rule 135) $default reduce using rule 135 (PrimaryExpression) state 229 PrimaryExpression -> RDFLiteral . (rule 134) $default reduce using rule 134 (PrimaryExpression) state 230 PrimaryExpression -> BooleanLiteral . (rule 136) $default reduce using rule 136 (PrimaryExpression) state 231 PrimaryExpression -> BlankNode . (rule 137) $default reduce using rule 137 (PrimaryExpression) state 232 FunctionCall -> IRIref LEFT_PAREN . 
ArgumentList RIGHT_PAREN (rule 126) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 284 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 ArgumentList go to state 285 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 233 FunctionCall -> IRIref NIL . (rule 127) $default reduce using rule 127 (FunctionCall) state 234 GraphPatternNotTriples -> GRAPH IRIref GroupGraphPattern . (rule 80) $default reduce using rule 80 (GraphPatternNotTriples) state 235 GraphPatternNotTriples -> GRAPH Var GroupGraphPattern . 
(rule 78) $default reduce using rule 78 (GraphPatternNotTriples) state 236 GraphPatternNotTriples -> GRAPH BlankNode GroupGraphPattern . (rule 79) $default reduce using rule 79 (GraphPatternNotTriples) state 237 AlternativeGroupGraphPatterns -> UNION GroupGraphPattern . (rule 81) $default reduce using rule 81 (AlternativeGroupGraphPatterns) state 238 AlternativeGroupGraphPatterns -> AlternativeGroupGraphPatterns UNION . GroupGraphPattern (rule 82) LEFT_CURLY shift, and go to state 32 GroupGraphPattern go to state 286 state 239 GraphPattern -> GraphPatternNotTriples DOT Triples . (rule 72) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 $default reduce using rule 72 (GraphPattern) state 240 GraphPattern -> Filter DOT Triples . (rule 68) Triples -> Triples . DOT TriplesSameSubject (rule 142) Triples -> Triples . DOT (rule 143) DOT shift, and go to state 123 $default reduce using rule 68 (GraphPattern) state 241 GroupGraphPattern -> LEFT_CURLY Triples GraphPatternList RIGHT_CURLY . (rule 62) $default reduce using rule 62 (GroupGraphPattern) state 242 TriplesNode -> LEFT_SQUARE PropertyList RIGHT_SQUARE . (rule 163) $default reduce using rule 163 (TriplesNode) state 243 TriplesSameSubject -> LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList . (rule 147) $default reduce using rule 147 (TriplesSameSubject) state 244 ObjectList -> ObjectList COMMA . 
GraphNode (rule 155) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 TRUE shift, and go to state 43 FALSE shift, and go to state 44 LEFT_PAREN shift, and go to state 45 LEFT_SQUARE shift, and go to state 100 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 NIL shift, and go to state 54 ANON shift, and go to state 55 PLUS shift, and go to state 56 MINUS shift, and go to state 57 LESS_THAN shift, and go to state 58 IRIref go to state 59 PrefixedName go to state 60 GraphNode go to state 287 TriplesNode go to state 102 Collection go to state 103 Var go to state 105 GraphTerm go to state 106 NumericLiteral go to state 66 RDFLiteral go to state 67 BooleanLiteral go to state 68 String go to state 69 BlankNode go to state 70 state 245 PropertyListNotEmpty -> Verb ObjectList SEMICOLON . PropertyList (rule 153) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 VARNAME shift, and go to state 23 A shift, and go to state 107 LESS_THAN shift, and go to state 58 $default reduce using rule 151 (PropertyList) IRIref go to state 108 PrefixedName go to state 60 PropertyList go to state 288 PropertyListNotEmpty go to state 174 Verb go to state 110 Var go to state 111 state 246 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause . 
SolutionModifier (rule 10) ORDER shift, and go to state 71 LIMIT shift, and go to state 72 OFFSET shift, and go to state 73 $default reduce using rule 41 (SolutionModifier) SolutionModifier go to state 289 OrderClause go to state 77 LimitClause go to state 78 OffsetClause go to state 79 state 247 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier . (rule 11) $default reduce using rule 11 (ConstructQuery) state 248 OrderCondition -> ASC LEFT_PAREN . ConditionalOrExpression RIGHT_PAREN (rule 53) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 290 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 
226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 249 OrderCondition -> DESC LEFT_PAREN . ConditionalOrExpression RIGHT_PAREN (rule 54) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 291 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 250 OrderCondition -> LEFT_PAREN ConditionalOrExpression . 
RIGHT_PAREN (rule 57) RIGHT_PAREN shift, and go to state 292 state 251 OrderConditionList -> OrderConditionList OrderCondition . (rule 52) $default reduce using rule 52 (OrderConditionList) state 252 SelectQuery -> SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier . (rule 26) $default reduce using rule 26 (SelectQuery) state 253 BuiltInCall -> STR LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 114) RIGHT_PAREN shift, and go to state 293 state 254 BuiltInCall -> LANG LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 115) RIGHT_PAREN shift, and go to state 294 state 255 BuiltInCall -> LANGMATCHES LEFT_PAREN ConditionalOrExpression . COMMA ConditionalOrExpression RIGHT_PAREN (rule 116) COMMA shift, and go to state 295 state 256 BuiltInCall -> DATATYPE LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 117) RIGHT_PAREN shift, and go to state 296 state 257 BuiltInCall -> isIRI LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 119) RIGHT_PAREN shift, and go to state 297 state 258 BuiltInCall -> isURI LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 120) RIGHT_PAREN shift, and go to state 298 state 259 BuiltInCall -> isLITERAL LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 122) RIGHT_PAREN shift, and go to state 299 state 260 BuiltInCall -> isBLANK LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 121) RIGHT_PAREN shift, and go to state 300 state 261 BuiltInCall -> BOUND LEFT_PAREN Var . RIGHT_PAREN (rule 118) RIGHT_PAREN shift, and go to state 301 state 262 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression . COMMA ConditionalOrExpression RIGHT_PAREN (rule 124) RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression . COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 125) COMMA shift, and go to state 302 state 263 UnaryExpression -> BANG PrimaryExpression . 
(rule 110) $default reduce using rule 110 (UnaryExpression) state 264 PrimaryExpression -> LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 130) RIGHT_PAREN shift, and go to state 303 state 265 UnaryExpression -> PLUS PrimaryExpression . (rule 111) $default reduce using rule 111 (UnaryExpression) state 266 UnaryExpression -> MINUS PrimaryExpression . (rule 112) $default reduce using rule 112 (UnaryExpression) state 267 Filter -> FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 139) $default reduce using rule 139 (Filter) state 268 ConditionalAndExpressionList -> DOUBLE_PIPE . ConditionalAndExpression (rule 85) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalAndExpression go to state 304 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 
158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 269 ConditionalOrExpression -> ConditionalAndExpression ConditionalAndExpressionList . (rule 84) ConditionalAndExpressionList -> ConditionalAndExpressionList . DOUBLE_PIPE ConditionalAndExpression (rule 86) DOUBLE_PIPE shift, and go to state 305 $default reduce using rule 84 (ConditionalOrExpression) state 270 ValueLogicalList -> DOUBLE_AMPERSAND . RelationalExpression (rule 89) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 RelationalExpression go to state 306 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 
Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 271 ConditionalAndExpression -> RelationalExpression ValueLogicalList . (rule 87) ValueLogicalList -> ValueLogicalList . DOUBLE_AMPERSAND RelationalExpression (rule 90) DOUBLE_AMPERSAND shift, and go to state 307 $default reduce using rule 87 (ConditionalAndExpression) state 272 RelationalExpression -> AdditiveExpression EQUALITY_OP . AdditiveExpression (rule 92) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 308 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to 
state 69 BlankNode go to state 231 state 273 RelationalExpression -> AdditiveExpression NOT_EQUAL . AdditiveExpression (rule 93) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 309 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 274 RelationalExpression -> AdditiveExpression LESS_THAN . 
AdditiveExpression (rule 94) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 310 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 275 RelationalExpression -> AdditiveExpression GREATER_THAN . 
AdditiveExpression (rule 95) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 311 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 276 RelationalExpression -> AdditiveExpression LESS_THAN_EQUAL . 
AdditiveExpression (rule 96) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 312 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 277 RelationalExpression -> AdditiveExpression GREATER_THAN_EQUAL . 
AdditiveExpression (rule 97) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 AdditiveExpression go to state 313 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 278 MultiplicativeExpressionList -> PLUS . 
MultiplicativeExpression (rule 100) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 MultiplicativeExpression go to state 314 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 279 MultiplicativeExpressionList -> MINUS . 
MultiplicativeExpression (rule 101) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 MultiplicativeExpression go to state 315 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 280 AdditiveExpression -> MultiplicativeExpression MultiplicativeExpressionList . (rule 99) MultiplicativeExpressionList -> MultiplicativeExpressionList . MINUS MultiplicativeExpression (rule 102) MultiplicativeExpressionList -> MultiplicativeExpressionList . PLUS MultiplicativeExpression (rule 103) PLUS shift, and go to state 316 MINUS shift, and go to state 317 $default reduce using rule 99 (AdditiveExpression) state 281 UnaryExpressionList -> ASTERISK . 
UnaryExpression (rule 106) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 UnaryExpression go to state 318 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 282 UnaryExpressionList -> FORWARDSLASH . 
UnaryExpression (rule 107) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 UnaryExpression go to state 319 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 283 MultiplicativeExpression -> UnaryExpression UnaryExpressionList . (rule 105) UnaryExpressionList -> UnaryExpressionList . ASTERISK UnaryExpression (rule 108) UnaryExpressionList -> UnaryExpressionList . FORWARDSLASH UnaryExpression (rule 109) ASTERISK shift, and go to state 320 FORWARDSLASH shift, and go to state 321 $default reduce using rule 105 (MultiplicativeExpression) state 284 ArgumentList -> ConditionalOrExpression . (rule 128) ArgumentList -> ConditionalOrExpression . 
COMMA ArgumentList (rule 129) COMMA shift, and go to state 322 $default reduce using rule 128 (ArgumentList) state 285 FunctionCall -> IRIref LEFT_PAREN ArgumentList . RIGHT_PAREN (rule 126) RIGHT_PAREN shift, and go to state 323 state 286 AlternativeGroupGraphPatterns -> AlternativeGroupGraphPatterns UNION GroupGraphPattern . (rule 82) $default reduce using rule 82 (AlternativeGroupGraphPatterns) state 287 ObjectList -> ObjectList COMMA GraphNode . (rule 155) $default reduce using rule 155 (ObjectList) state 288 PropertyListNotEmpty -> Verb ObjectList SEMICOLON PropertyList . (rule 153) $default reduce using rule 153 (PropertyListNotEmpty) state 289 ConstructQuery -> CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier . (rule 10) $default reduce using rule 10 (ConstructQuery) state 290 OrderCondition -> ASC LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 53) RIGHT_PAREN shift, and go to state 324 state 291 OrderCondition -> DESC LEFT_PAREN ConditionalOrExpression . RIGHT_PAREN (rule 54) RIGHT_PAREN shift, and go to state 325 state 292 OrderCondition -> LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 57) $default reduce using rule 57 (OrderCondition) state 293 BuiltInCall -> STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 114) $default reduce using rule 114 (BuiltInCall) state 294 BuiltInCall -> LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 115) $default reduce using rule 115 (BuiltInCall) state 295 BuiltInCall -> LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA . 
ConditionalOrExpression RIGHT_PAREN (rule 116) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 326 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 296 BuiltInCall -> DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 117) $default reduce using rule 117 (BuiltInCall) state 297 BuiltInCall -> isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . 
(rule 119) $default reduce using rule 119 (BuiltInCall) state 298 BuiltInCall -> isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 120) $default reduce using rule 120 (BuiltInCall) state 299 BuiltInCall -> isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 122) $default reduce using rule 122 (BuiltInCall) state 300 BuiltInCall -> isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 121) $default reduce using rule 121 (BuiltInCall) state 301 BuiltInCall -> BOUND LEFT_PAREN Var RIGHT_PAREN . (rule 118) $default reduce using rule 118 (BuiltInCall) state 302 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA . ConditionalOrExpression RIGHT_PAREN (rule 124) RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA . ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN (rule 125) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression 
go to state 327 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 303 PrimaryExpression -> LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 130) $default reduce using rule 130 (PrimaryExpression) state 304 ConditionalAndExpressionList -> DOUBLE_PIPE ConditionalAndExpression . (rule 85) $default reduce using rule 85 (ConditionalAndExpressionList) state 305 ConditionalAndExpressionList -> ConditionalAndExpressionList DOUBLE_PIPE . ConditionalAndExpression (rule 86) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName 
go to state 60 ConditionalAndExpression go to state 328 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 306 ValueLogicalList -> DOUBLE_AMPERSAND RelationalExpression . (rule 89) $default reduce using rule 89 (ValueLogicalList) state 307 ValueLogicalList -> ValueLogicalList DOUBLE_AMPERSAND . RelationalExpression (rule 90) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 RelationalExpression go to state 329 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 
RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 308 RelationalExpression -> AdditiveExpression EQUALITY_OP AdditiveExpression . (rule 92) $default reduce using rule 92 (RelationalExpression) state 309 RelationalExpression -> AdditiveExpression NOT_EQUAL AdditiveExpression . (rule 93) $default reduce using rule 93 (RelationalExpression) state 310 RelationalExpression -> AdditiveExpression LESS_THAN AdditiveExpression . (rule 94) $default reduce using rule 94 (RelationalExpression) state 311 RelationalExpression -> AdditiveExpression GREATER_THAN AdditiveExpression . (rule 95) $default reduce using rule 95 (RelationalExpression) state 312 RelationalExpression -> AdditiveExpression LESS_THAN_EQUAL AdditiveExpression . (rule 96) $default reduce using rule 96 (RelationalExpression) state 313 RelationalExpression -> AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression . (rule 97) $default reduce using rule 97 (RelationalExpression) state 314 MultiplicativeExpressionList -> PLUS MultiplicativeExpression . (rule 100) $default reduce using rule 100 (MultiplicativeExpressionList) state 315 MultiplicativeExpressionList -> MINUS MultiplicativeExpression . (rule 101) $default reduce using rule 101 (MultiplicativeExpressionList) state 316 MultiplicativeExpressionList -> MultiplicativeExpressionList PLUS . 
MultiplicativeExpression (rule 103) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 MultiplicativeExpression go to state 330 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 317 MultiplicativeExpressionList -> MultiplicativeExpressionList MINUS . 
MultiplicativeExpression (rule 102) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 MultiplicativeExpression go to state 331 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 318 UnaryExpressionList -> ASTERISK UnaryExpression . (rule 106) $default reduce using rule 106 (UnaryExpressionList) state 319 UnaryExpressionList -> FORWARDSLASH UnaryExpression . (rule 107) $default reduce using rule 107 (UnaryExpressionList) state 320 UnaryExpressionList -> UnaryExpressionList ASTERISK . 
UnaryExpression (rule 108) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 UnaryExpression go to state 332 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 321 UnaryExpressionList -> UnaryExpressionList FORWARDSLASH . 
UnaryExpression (rule 109) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 UnaryExpression go to state 333 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 322 ArgumentList -> ConditionalOrExpression COMMA . 
ArgumentList (rule 129) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 284 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 ArgumentList go to state 334 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 323 FunctionCall -> IRIref LEFT_PAREN ArgumentList RIGHT_PAREN . (rule 126) $default reduce using rule 126 (FunctionCall) state 324 OrderCondition -> ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . 
(rule 53) $default reduce using rule 53 (OrderCondition) state 325 OrderCondition -> DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN . (rule 54) $default reduce using rule 54 (OrderCondition) state 326 BuiltInCall -> LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression . RIGHT_PAREN (rule 116) RIGHT_PAREN shift, and go to state 335 state 327 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression . RIGHT_PAREN (rule 124) RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression . COMMA ConditionalOrExpression RIGHT_PAREN (rule 125) COMMA shift, and go to state 336 RIGHT_PAREN shift, and go to state 337 state 328 ConditionalAndExpressionList -> ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression . (rule 86) $default reduce using rule 86 (ConditionalAndExpressionList) state 329 ValueLogicalList -> ValueLogicalList DOUBLE_AMPERSAND RelationalExpression . (rule 90) $default reduce using rule 90 (ValueLogicalList) state 330 MultiplicativeExpressionList -> MultiplicativeExpressionList PLUS MultiplicativeExpression . (rule 103) $default reduce using rule 103 (MultiplicativeExpressionList) state 331 MultiplicativeExpressionList -> MultiplicativeExpressionList MINUS MultiplicativeExpression . (rule 102) $default reduce using rule 102 (MultiplicativeExpressionList) state 332 UnaryExpressionList -> UnaryExpressionList ASTERISK UnaryExpression . (rule 108) $default reduce using rule 108 (UnaryExpressionList) state 333 UnaryExpressionList -> UnaryExpressionList FORWARDSLASH UnaryExpression . (rule 109) $default reduce using rule 109 (UnaryExpressionList) state 334 ArgumentList -> ConditionalOrExpression COMMA ArgumentList . (rule 129) $default reduce using rule 129 (ArgumentList) state 335 BuiltInCall -> LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN . 
(rule 116) $default reduce using rule 116 (BuiltInCall) state 336 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA . ConditionalOrExpression RIGHT_PAREN (rule 125) PNAME_NS shift, and go to state 40 PNAME_LN shift, and go to state 41 BLANK_NODE_LABEL shift, and go to state 42 VARNAME shift, and go to state 23 STR shift, and go to state 145 LANG shift, and go to state 146 LANGMATCHES shift, and go to state 147 DATATYPE shift, and go to state 148 isIRI shift, and go to state 149 isURI shift, and go to state 150 isLITERAL shift, and go to state 151 isBLANK shift, and go to state 152 BOUND shift, and go to state 153 REGEX shift, and go to state 154 TRUE shift, and go to state 43 FALSE shift, and go to state 44 BANG shift, and go to state 213 LEFT_PAREN shift, and go to state 214 INTEGER shift, and go to state 47 DECIMAL shift, and go to state 48 DOUBLE shift, and go to state 49 STRING_LITERAL_DELIMETER_1 shift, and go to state 50 STRING_LITERAL_DELIMETER_2 shift, and go to state 51 STRING_LITERAL_DELIMETER_3 shift, and go to state 52 STRING_LITERAL_DELIMETER_4 shift, and go to state 53 ANON shift, and go to state 55 PLUS shift, and go to state 215 MINUS shift, and go to state 216 LESS_THAN shift, and go to state 58 IRIref go to state 217 PrefixedName go to state 60 ConditionalOrExpression go to state 338 ConditionalAndExpression go to state 219 RelationalExpression go to state 220 AdditiveExpression go to state 221 MultiplicativeExpression go to state 222 UnaryExpression go to state 223 BuiltInCall go to state 224 RegexExpression go to state 158 FunctionCall go to state 225 PrimaryExpression go to state 226 Var go to state 227 NumericLiteral go to state 228 RDFLiteral go to state 229 BooleanLiteral go to state 230 String go to state 69 BlankNode go to state 231 state 337 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN . 
(rule 124) $default reduce using rule 124 (RegexExpression) state 338 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression . RIGHT_PAREN (rule 125) RIGHT_PAREN shift, and go to state 339 state 339 RegexExpression -> REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN . (rule 125) $default reduce using rule 125 (RegexExpression) state 340 $ go to state 341 state 341 $ go to state 342 state 342 $default accept rdflib-2.4.2/src/bison/SPARQLTurtleSuperSet.bgen.frag0000644000175000017500000003342111153616037021302 0ustar nachonacho Filter FILTER LEFT_PAREN ConditionalOrExpression RIGHT_PAREN $$ = PyObject_CallMethod(Filter, "ParsedExpressionFilter", "O", $3); FILTER BuiltInCall $$ = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", $2); FILTER FunctionCall $$ = PyObject_CallMethod(Filter, "ParsedFunctionFilter", "O", $2); Triples Triples DOT TriplesSameSubject PyList_Append($1, $3); Py_INCREF($1); $$ = $1; Triples DOT TriplesSameSubject $$ = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); TriplesSameSubject Var PropertyListNotEmpty $$ = PyObject_CallMethod(Resource, "Resource", "OO", $1,$2); GraphTerm PropertyListNotEmpty $$ = PyObject_CallMethod(Resource, "Resource", "OO", $1,$2); LEFT_SQUARE PropertyListNotEmpty RIGHT_SQUARE PropertyList $$ = PyObject_CallMethod(Resource, "TwiceReferencedBlankNode", "OO", $2,$4); Collection PropertyListNotEmpty PyObject_CallMethod($1, "setPropertyValueList", "O", $2); Py_INCREF($1); $$ = $1; Collection PropertyList PropertyListNotEmpty $$ = PyList_New(0); PropertyListNotEmpty Verb ObjectList $$ = PyList_New(1); PyList_SET_ITEM($$, 0, PyObject_CallMethod(Triples, "PropertyValue", "OO", $1,$2)); Verb ObjectList SEMICOLON PropertyList $$ = PyObject_CallMethod(Util, "ListPrepend", "OO", PyObject_CallMethod(Triples, "PropertyValue", "OO", $1,$2),$4); ObjectList GraphNode 
$$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); ObjectList COMMA GraphNode PyList_Append($1, $3); Py_INCREF($1); $$ = $1; GraphNode Var TriplesNode GraphTerm Verb Var IRIref A $$ = PyObject_GetAttrString(RDF, "type"); TriplesNode Collection LEFT_SQUARE PropertyList RIGHT_SQUARE Py_INCREF(Py_None); $$ = PyObject_CallMethod(Resource, "Resource", "OO", Py_None,$2); Collection LEFT_PAREN GraphNodeList RIGHT_PAREN $$ = PyObject_CallMethod(Resource, "ParsedCollection", "O", $2); GraphNodeList GraphNode $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); GraphNodeList GraphNode PyList_Append($1, $2); Py_INCREF($1); $$ = $1; Var VARNAME PyObject *varName = PySequence_GetSlice($1, 1, PyString_GET_SIZE($1)); $$ = PyObject_CallMethod(rdflib, "Variable", "O", varName); Py_XDECREF(varName); GraphTerm IRIref RDFLiteral NumericLiteral PLUS NumericLiteral MINUS NumericLiteral PyObject *negNum = PyNumber_Negative(PyObject_CallMethod($2,"toPython",NULL)); $$ = PyObject_CallMethod(rdflib, "Literal", "O", negNum); Py_XDECREF(negNum); BooleanLiteral BlankNode NIL NumericLiteral INTEGER PyObject *num = PyNumber_Int($1); $$ = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); DECIMAL PyObject *num = PyNumber_Float($1); $$ = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); DOUBLE PyObject *num = PyNumber_Float($1); $$ = PyObject_CallMethod(rdflib, "Literal", "O", num); Py_XDECREF(num); RDFLiteral String $$ = PyObject_CallMethod(rdflib, "Literal", "O", $1); String LANGTAG PyObject *lang = PySequence_GetSlice($2, 1, PyString_GET_SIZE($2)); $$ = PyObject_CallMethod(rdflib, "Literal", "OO", $1, lang); Py_XDECREF(lang); String DOUBLE_HAT IRIref $$ = PyObject_CallMethod(Expression, "ParsedDatatypedLiteral", "OO", $1,$3); BooleanLiteral TRUE FALSE String STRING_LITERAL_DELIMETER_1 STRING_LITERAL1 STRING_LITERAL_DELIMETER_1 $$ = PyObject_CallMethod(Expression, "ParsedString", "O", $2); STRING_LITERAL_DELIMETER_3 STRING_LITERAL2 
STRING_LITERAL_DELIMETER_3 $$ = PyObject_CallMethod(Expression, "ParsedString", "O", $2); STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG1 STRING_LITERAL_DELIMETER_2 $$ = PyObject_CallMethod(Expression, "ParsedString", "O", $2); STRING_LITERAL_DELIMETER_4 STRING_LITERAL_LONG2 STRING_LITERAL_DELIMETER_4 $$ = PyObject_CallMethod(Expression, "ParsedString", "O", $2); STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_1 $$ = PyObject_CallMethod(Expression, "ParsedString", ""); STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_3 $$ = PyObject_CallMethod(Expression, "ParsedString", ""); STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_2 $$ = PyObject_CallMethod(Expression, "ParsedString", ""); STRING_LITERAL_DELIMETER_4 STRING_LITERAL_DELIMETER_4 $$ = PyObject_CallMethod(Expression, "ParsedString", ""); BlankNode ANON $$ = PyObject_CallMethod(rdflib, "Variable", "O", PyObject_CallMethod(rdflib, "BNode","")); BLANK_NODE_LABEL PyObject *lang = PySequence_GetSlice($1, 2, PyString_GET_SIZE($1)); $$ = PyObject_CallMethod(rdflib, "BNode", "O",lang); Py_XDECREF(lang); rdflib-2.4.2/src/bison/SPARQLTokens.bgen.frag0000644000175000017500000000432511153616037017574 0ustar nachonacho WHITESPACE UNION COLON Q_IRI_CONTENT PNAME_NS CONSTRUCT DESCRIBE PNAME_LN BLANK_NODE_LABEL VARNAME PREFIX ASTERISK DOT QUESTION_MARK DOLLAR BASE PREFIX SELECT DISTINCT FROM NAMED OPTIONAL FILTER GRAPH WHERE ORDER BY ASC ASK DESC LIMIT OFFSET STR LANG LANGMATCHES DATATYPE isIRI isURI isLITERAL isBLANK BOUND REGEX A TRUE FALSE DOUBLE_AMPERSAND DOUBLE_PIPE BANG DOUBLE_HAT COMMA QUESTION_MARK DOLLAR FORWARDSLASH LEFT_PAREN RIGHT_PAREN LEFT_SQUARE RIGHT_SQUARE SEMICOLON INTEGER DECIMAL DOUBLE STRING_LITERAL_DELIMETER_1 STRING_LITERAL_DELIMETER_2 STRING_LITERAL_DELIMETER_3 STRING_LITERAL_DELIMETER_4 STRING_LITERAL1 STRING_LITERAL2 STRING_LITERAL_LONG1 STRING_LITERAL_LONG2 NIL ANON LANGTAG LEFT_CURLY RIGHT_CURLY PLUS MINUS EQUALITY_OP NOT_EQUAL LESS_THAN GREATER_THAN LESS_THAN_EQUAL GREATER_THAN_EQUAL 
rdflib-2.4.2/src/bison/SPARQLLexerPatterns.bgen.frag0000644000175000017500000000704711153616037021135 0ustar nachonacho STRING_MODE_SHORT_1 STRING_MODE_SHORT_2 STRING_MODE_LONG_1 STRING_MODE_LONG_2 IRI_MODE STRING_MODE_SHORT_1 STRING_LITERAL_DELIMETER_1 STRING_MODE_LONG_1 STRING_LITERAL_DELIMETER_2 STRING_MODE_SHORT_2 STRING_LITERAL_DELIMETER_3 STRING_MODE_LONG_2 STRING_LITERAL_DELIMETER_4 IRI_MODE LESS_THAN LANGTAG NIL ANON PNAME_LN PNAME_NS BLANK_NODE_LABEL VARNAME LANGTAG INTEGER DECIMAL DOUBLE INITIAL GREATER_THAN Q_IRI_CONTENT INITIAL STRING_LITERAL1 INITIAL STRING_LITERAL_DELIMETER_1 STRING_LITERAL2 INITIAL STRING_LITERAL_DELIMETER_3 STRING_LITERAL_LONG1 INITIAL STRING_LITERAL_DELIMETER_2 STRING_LITERAL_LONG2 INITIAL STRING_LITERAL_DELIMETER_4 rdflib-2.4.2/src/bison/SPARQL.bgen0000644000175000017500000016603411153616037015500 0ustar nachonacho IRIRef Bindings Query QName GraphPattern FunctionLibrary Operators Triples Resource Filter Util Expression SolutionModifier rdflib RDF Query Prolog QueryTypes $$ = PyObject_CallMethod(Query, "Query", "OO", $1, $2); QueryTypes SelectQuery $$ = $1; Py_INCREF($1); ConstructQuery DescribeQuery AskQuery $$ = $1; Py_INCREF($1); DescribeQuery DESCRIBE VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier $$ = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", $2,$3,$4,$5); DESCRIBE VAR_REFERENCES SolutionModifier Py_INCREF(Py_None); Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", $2,Py_None,Py_None,$3); DESCRIBE VAR_REFERENCES DataSetClauseList SolutionModifier Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", $2,$3,Py_None,$5); DESCRIBE VAR_REFERENCES WhereClause SolutionModifier Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "DescribeQuery", "OOOO", $2,Py_None,$3,$4); ConstructQuery CONSTRUCT LEFT_CURLY Triples RIGHT_CURLY DataSetClauseList WhereClause SolutionModifier $$ = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", $3,$5,$6,$7); CONSTRUCT 
LEFT_CURLY Triples RIGHT_CURLY WhereClause SolutionModifier Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "ConstructQuery", "OOOO", $3,Py_None,$5,$6); Prolog BaseDecl PrefixDeclList $$ = PyObject_CallMethod(Query, "Prolog", "OO", $1, $2); BaseDecl Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "Prolog", "OO", $1, Py_None); PrefixDeclList Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "Prolog", "OO", Py_None, $1); $$ = Py_None; PrefixDeclList PrefixDecl $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); PrefixDeclList PrefixDecl PyList_Append($1, $2); Py_INCREF($1); $$ = $1; PrefixDecl PREFIX PNAME_NS LESS_THAN Q_IRI_CONTENT GREATER_THAN $$ = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", $2,$4); PREFIX PNAME_NS LESS_THAN GREATER_THAN PyObject *t = PyObject_GetAttrString(Bindings, "EMPTY_STRING"); $$ = PyObject_CallMethod(Bindings, "PrefixDeclaration", "OO", $2,t); Py_XDECREF(t); BaseDecl BASE LESS_THAN Q_IRI_CONTENT GREATER_THAN $$ = PyObject_CallMethod(Bindings, "BaseDeclaration", "O", $3); AskQuery ASK WhereClause Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "AskQuery", "OO", Py_None,$2); ASK DataSetClauseList WhereClause $$ = PyObject_CallMethod(Query, "AskQuery", "OO", $2,$3); SelectQuery SELECT VAR_REFERENCES WhereClause SolutionModifier Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "SelectQuery", "OOOO", $2,Py_None,$3,$4); SELECT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier $$ = PyObject_CallMethod(Query, "SelectQuery", "OOOO", $2,$3,$4,$5); SELECT DISTINCT VAR_REFERENCES WhereClause SolutionModifier Py_INCREF(Py_None); $$ = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", $3,Py_None,$4,$5,1); SELECT DISTINCT VAR_REFERENCES DataSetClauseList WhereClause SolutionModifier $$ = PyObject_CallMethod(Query, "SelectQuery", "OOOOi", $3,$4,$5,$6,1); VAR_REFERENCES VariableReferenceList ASTERISK $$ = Py_None; VariableReferenceList Var $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); 
VariableReferenceList Var PyList_Append($1, $2); Py_INCREF($1); $$ = $1; IRIref LESS_THAN Q_IRI_CONTENT GREATER_THAN $$ = PyObject_CallMethod(IRIRef, "IRIRef", "O",$2); PrefixedName $$ = PyObject_CallMethod(QName, "QName", "O",$1); PrefixedName PNAME_NS $$ = PyObject_CallMethod(QName, "QName", "O",$1); PNAME_LN $$ = PyObject_CallMethod(QName, "QName", "O",$1); DataSetClauseList DataSetClause $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); DataSetClauseList DataSetClause PyList_Append($1, $2); Py_INCREF($1); $$ = $1; DataSetClause FROM IRIref $$ = PyObject_CallMethod(IRIRef, "RemoteGraph", "O", $2); FROM NAMED IRIref $$ = PyObject_CallMethod(IRIRef, "NamedGraph", "O", $3); WhereClause WHERE GroupGraphPattern $$ = PyObject_CallMethod(Query, "WhereClause", "O", $2); GroupGraphPattern $$ = PyObject_CallMethod(Query, "WhereClause", "O", $1); SolutionModifier $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", ""); OrderClause $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "O",$1); OrderClause LimitClause $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OO",$1,$2); OrderClause LimitClause OffsetClause $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",$1,$2,$3); OrderClause OffsetClause LimitClause $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",$1,$2,$3); LimitClause OffsetClause Py_INCREF(Py_None); $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,$1,$2); OrderClause OffsetClause Py_INCREF(Py_None); $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",$1,Py_None,$2); OffsetClause Py_INCREF(Py_None); Py_INCREF(Py_None); $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,Py_None,$1); LimitClause Py_INCREF(Py_None); Py_INCREF(Py_None); $$ = PyObject_CallMethod(SolutionModifier, "SolutionModifier", "OOO",Py_None,$1,Py_None); OrderClause ORDER BY OrderConditionList $$ = $3; Py_INCREF($3); OrderConditionList 
OrderCondition $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); OrderConditionList OrderCondition PyList_Append($1, $2); Py_INCREF($1); $$ = $1; OrderCondition ASC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *t = PyObject_GetAttrString(SolutionModifier, "ASCENDING_ORDER"); $$ = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",$3,t); Py_XDECREF(t); DESC LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *t = PyObject_GetAttrString(SolutionModifier, "DESCENDING_ORDER"); $$ = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",$3,t); Py_XDECREF(t); FunctionCall BuiltInCall LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *t = PyObject_GetAttrString(SolutionModifier, "UNSPECIFIED_ORDER"); $$ = PyObject_CallMethod(SolutionModifier, "ParsedOrderConditionExpression", "OO",$2,t); Py_XDECREF(t); Var LimitClause LIMIT NumericLiteral $$ = $2; Py_INCREF($2); OffsetClause OFFSET NumericLiteral $$ = $2; Py_INCREF($2); GroupGraphPattern LEFT_CURLY RIGHT_CURLY $$ = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,Py_None); Py_INCREF(Py_None); LEFT_CURLY Triples GraphPatternList RIGHT_CURLY $$ = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",$2,$3); LEFT_CURLY Triples RIGHT_CURLY $$ = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",$2,Py_None); Py_INCREF(Py_None); LEFT_CURLY GraphPatternList RIGHT_CURLY $$ = PyObject_CallMethod(GraphPattern, "ParsedGroupGraphPattern", "OO",Py_None,$2); Py_INCREF(Py_None); GraphPatternList GraphPattern $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); GraphPatternList GraphPattern PyList_Append($1, $2); Py_INCREF($1); $$ = $1; GraphPattern Filter Triples $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,$1,$2); Py_INCREF(Py_None); Filter DOT Triples $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,$1,$3); Py_INCREF(Py_None); Filter DOT $$ = 
PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,$1,Py_None); Py_INCREF(Py_None); Filter $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",Py_None,$1,Py_None); Py_INCREF(Py_None); GraphPatternNotTriples Triples $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",$1,Py_None,$2); Py_INCREF(Py_None); GraphPatternNotTriples DOT Triples $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",$1,Py_None,$3); Py_INCREF(Py_None); GraphPatternNotTriples $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",$1,Py_None,Py_None); Py_INCREF(Py_None); GraphPatternNotTriples DOT $$ = PyObject_CallMethod(GraphPattern, "GraphPattern", "OOO",$1,Py_None,Py_None); Py_INCREF(Py_None); GraphPatternNotTriples OPTIONAL GroupGraphPattern $$ = PyObject_CallMethod(GraphPattern, "ParsedOptionalGraphPattern", "O", $2); GroupGraphPattern $$ = $1; GroupGraphPattern AlternativeGroupGraphPatterns $$ = PyObject_CallMethod(GraphPattern, "ParsedAlternativeGraphPattern", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2)); GRAPH Var GroupGraphPattern $$ = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", $2,$3); GRAPH BlankNode GroupGraphPattern $$ = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", $2,$3); GRAPH IRIref GroupGraphPattern $$ = PyObject_CallMethod(GraphPattern, "ParsedGraphGraphPattern", "OO", $2,$3); AlternativeGroupGraphPatterns UNION GroupGraphPattern $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $2); Py_INCREF($2); AlternativeGroupGraphPatterns UNION GroupGraphPattern PyList_Append($1, $3); Py_INCREF($1); $$ = $1; ConditionalOrExpression ConditionalAndExpression ConditionalAndExpression ConditionalAndExpressionList $$ = PyObject_CallMethod(Expression, "ParsedConditionalAndExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2)); ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $2); Py_INCREF($2); 
ConditionalAndExpressionList DOUBLE_PIPE ConditionalAndExpression PyList_Append($1, $3); Py_INCREF($1); $$ = $1; ConditionalAndExpression RelationalExpression ValueLogicalList $$ = PyObject_CallMethod(Expression, "ParsedRelationalExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2)); RelationalExpression ValueLogicalList DOUBLE_AMPERSAND RelationalExpression $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $2); Py_INCREF($2); ValueLogicalList DOUBLE_AMPERSAND RelationalExpression PyList_Append($1, $3); Py_INCREF($1); $$ = $1; RelationalExpression AdditiveExpression AdditiveExpression EQUALITY_OP AdditiveExpression $$ = PyObject_CallMethod(Operators, "EqualityOperator", "OO", $1,$3); AdditiveExpression NOT_EQUAL AdditiveExpression $$ = PyObject_CallMethod(Operators, "NotEqualOperator", "OO", $1,$3); AdditiveExpression LESS_THAN AdditiveExpression $$ = PyObject_CallMethod(Operators, "LessThanOperator", "OO", $1,$3); AdditiveExpression GREATER_THAN AdditiveExpression $$ = PyObject_CallMethod(Operators, "GreaterThanOperator", "OO", $1,$3); AdditiveExpression LESS_THAN_EQUAL AdditiveExpression $$ = PyObject_CallMethod(Operators, "LessThanOrEqualOperator", "OO", $1,$3); AdditiveExpression GREATER_THAN_EQUAL AdditiveExpression $$ = PyObject_CallMethod(Operators, "GreaterThanOrEqualOperator", "OO", $1,$3); AdditiveExpression MultiplicativeExpression MultiplicativeExpression MultiplicativeExpressionList $$ = PyObject_CallMethod(Expression, "ParsedAdditiveExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2)); /*$$ = PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2);*/ MultiplicativeExpressionList PLUS MultiplicativeExpression $$ = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",$1, $2); PyList_SET_ITEM($$, 0, mList); Py_INCREF(mList); MINUS MultiplicativeExpression $$ = PyList_New(1); PyObject *mList = PyObject_CallMethod(Expression, 
"ParsedPrefixedMultiplicativeExpressionList", "OO",$1, $2); PyList_SET_ITEM($$, 0, mList); Py_INCREF(mList); MultiplicativeExpressionList MINUS MultiplicativeExpression PyList_Append($1, PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",$2, $3)); Py_INCREF($1); $$ = $1; MultiplicativeExpressionList PLUS MultiplicativeExpression PyList_Append($1, PyObject_CallMethod(Expression, "ParsedPrefixedMultiplicativeExpressionList", "OO",$2, $3)); Py_INCREF($1); $$ = $1; MultiplicativeExpression UnaryExpression UnaryExpression UnaryExpressionList $$ = PyObject_CallMethod(Expression, "ParsedMultiplicativeExpressionList", "O", PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$2)); UnaryExpressionList ASTERISK UnaryExpression $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $2); Py_INCREF($2); FORWARDSLASH UnaryExpression $$ = PyList_New(1); PyList_SET_ITEM($$, 0, $2); Py_INCREF($2); UnaryExpressionList ASTERISK UnaryExpression PyList_Append($1, $3); Py_INCREF($1); $$ = $1; UnaryExpressionList FORWARDSLASH UnaryExpression PyList_Append($1, $3); Py_INCREF($1); $$ = $1; UnaryExpression BANG PrimaryExpression $$ = PyObject_CallMethod(Operators, "LogicalNegation", "O", $2); PLUS PrimaryExpression $$ = PyObject_CallMethod(Operators, "NumericPositive", "O", $2); MINUS PrimaryExpression $$ = PyObject_CallMethod(Operators, "NumericNegative", "O", $2); PrimaryExpression BuiltInCall STR LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "STR"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); LANG LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANG"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); LANGMATCHES LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "LANGMATCHES"); $$ = 
PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OOO", funcName,$3,$5); DATATYPE LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "DATATYPE"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); BOUND LEFT_PAREN Var RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "BOUND"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); isIRI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isIRI"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); isURI LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isURI"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); isBLANK LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isBLANK"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); isLITERAL LEFT_PAREN ConditionalOrExpression RIGHT_PAREN PyObject *funcName = PyObject_GetAttrString(FunctionLibrary, "isLITERAL"); $$ = PyObject_CallMethod(FunctionLibrary, "BuiltinFunctionCall", "OO", funcName,$3); RegexExpression RegexExpression REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN $$ = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OO", $3,$5); REGEX LEFT_PAREN ConditionalOrExpression COMMA ConditionalOrExpression COMMA ConditionalOrExpression RIGHT_PAREN $$ = PyObject_CallMethod(FunctionLibrary, "ParsedREGEXInvocation", "OOO", $3,$5,$7); . 
FunctionCall IRIref LEFT_PAREN ArgumentList RIGHT_PAREN $$ = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", $1, $3); IRIref NIL $$ = PyObject_CallMethod(FunctionLibrary, "FunctionCall", "OO", $1, PyList_New(0)); ArgumentList ConditionalOrExpression $$ = PyList_New(1); /* Steals a reference */ PyList_SET_ITEM($$, 0, $1); Py_INCREF($1); ConditionalOrExpression COMMA ArgumentList $$ = PyObject_CallMethod(Util, "ListPrepend", "OO", $1,$3); PrimaryExpression LEFT_PAREN ConditionalOrExpression RIGHT_PAREN $$ = $2; Py_INCREF($2); BuiltInCall IRIref FunctionCall RDFLiteral NumericLiteral BooleanLiteral BlankNode Var rdflib-2.4.2/src/bison/README.txt0000644000175000017500000000102411153616037015322 0ustar nachonachoThis parser is implemented using the BisonGen format (see references at bottom). SPARQLParser.c is generated from all the .bgen and .bgen.frag files. The command-line invokation for doing this is: BisonGen --mode=c SPARQL.bgen NOTE: The latest version of BisonGen (from CVS) may be required instead of the most package ftp://ftp.4suite.org/pub/BisonGen/ ## Bison Gen Resources ## - Copia article on BisonGen (with links): http://copia.ogbuji.net/blog/2005-04-27/Of_BisonGe - BisonGen CVS Tree: cvs.4suite.org/viewcvs/BisonGen/rdflib-2.4.2/src/bison/SPARQLLexerDefines.bgen.frag0000644000175000017500000001730111153616037020704 0ustar nachonacho 
[\u0300-\u0345\u0360-\u0361\u0483-\u0486\u0591-\u05A1\u05A3-\u05B9\u05BB-\u05BD\u05BF\u05C1-\u05C2\u05C4\u064B-\u0652\u0670\u06D6-\u06DC\u06DD-\u06DF\u06E0-\u06E4\u06E7-\u06E8\u06EA-\u06ED\u0901-\u0903\u093C\u093E-\u094C\u094D\u0951-\u0954\u0962-\u0963\u0981-\u0983\u09BC\u09BE\u09BF\u09C0-\u09C4\u09C7-\u09C8\u09CB-\u09CD\u09D7\u09E2-\u09E3\u0A02\u0A3C\u0A3E\u0A3F\u0A40-\u0A42\u0A47-\u0A48\u0A4B-\u0A4D\u0A70-\u0A71\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0B01-\u0B03\u0B3C\u0B3E-\u0B43\u0B47-\u0B48\u0B4B-\u0B4D\u0B56-\u0B57\u0B82-\u0B83\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C01-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55-\u0C56\u0C82-\u0C83\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5-\u0CD6\u0D02-\u0D03\u0D3E-\u0D43\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB-\u0EBC\u0EC8-\u0ECD\u0F18-\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86-\u0F8B\u0F90-\u0F95\u0F97\u0F99-\u0FAD\u0FB1-\u0FB7\u0FB9\u20D0-\u20DC\u20E1\u302A-\u302F\u3099\u309A] [\u00B7\u02D0\u02D1\u0387\u0640\u0E46\u0EC6\u3005\u3031-\u3035\u309D-\u309E\u30FC-\u30FE] {Letter}|{Digit}|\.|-|_|{CombiningChar}|{Extender} 
[\u0041-\u005A\u0061-\u007A\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF\u0100-\u0131\u0134-\u013E\u0141-\u0148\u014A-\u017E\u0180-\u01C3\u01CD-\u01F0\u01F4-\u01F5\u01FA-\u0217\u0250-\u02A8\u02BB-\u02C1\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03CE\u03D0-\u03D6\u03DA\u03DC\u03DE\u03E0\u03E2-\u03F3\u0401-\u040C\u040E-\u044F\u0451-\u045C\u045E-\u0481\u0490-\u04C4\u04C7-\u04C8\u04CB-\u04CC\u04D0-\u04EB\u04EE-\u04F5\u04F8-\u04F9\u0531-\u0556\u0559\u0561-\u0586\u05D0-\u05EA\u05F0-\u05F2\u0621-\u063A\u0641-\u064A\u0671-\u06B7\u06BA-\u06BE\u06C0-\u06CE\u06D0-\u06D3\u06D5\u06E5-\u06E6\u0905-\u0939\u093D\u0958-\u0961\u0985-\u098C\u098F-\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09DC-\u09DD\u09DF-\u09E1\u09F0-\u09F1\u0A05-\u0A0A\u0A0F-\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32-\u0A33\u0A35-\u0A36\u0A38-\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8B\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2-\u0AB3\u0AB5-\u0AB9\u0ABD\u0AE0\u0B05-\u0B0C\u0B0F-\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32-\u0B33\u0B36-\u0B39\u0B3D\u0B5C-\u0B5D\u0B5F-\u0B61\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99-\u0B9A\u0B9C\u0B9E-\u0B9F\u0BA3-\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB5\u0BB7-\u0BB9\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C60-\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CDE\u0CE0-\u0CE1\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D28\u0D2A-\u0D39\u0D60-\u0D61\u0E01-\u0E2E\u0E30\u0E32-\u0E33\u0E40-\u0E45\u0E81-\u0E82\u0E84\u0E87-\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA-\u0EAB\u0EAD-\u0EAE\u0EB0\u0EB2-\u0EB3\u0EBD\u0EC0-\u0EC4\u0F40-\u0F47\u0F49-\u0F69\u10A0-\u10C5\u10D0-\u10F6\u1100\u1102-\u1103\u1105-\u1107\u1109\u110B-\u110C\u110E-\u1112\u113C\u113E\u1140\u114C\u114E\u1150\u1154-\u1155\u1159\u115F-\u1161\u1163\u1165\u1167\u1169\u116D-\u116E\u1172-\u1173\u1175\u119E\u11A8\u11AB\u11AE-\u11AF\u11B7-\u11B8\u11BA\u11BC-\u11C2\u11EB\u11F0\u11F9\u1E00-\u1E9B\u1EA0-\u1EF9\u1F00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-
\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2126\u212A-\u212B\u212E\u2180-\u2182\u3041-\u3094\u30A1-\u30FA\u3105-\u312C\uAC00-\uD7A3] [\u4E00-\u9FA5\u3007\u3021-\u3029] {BaseChar}|{Ideographic} ({Letter}|_){NCNameChar}* ({Letter}|_|{Digit}){NCNameChar}* ({NCName}:)?{NCName} [0-9] @[a-zA-Z]+(-[a-zA-Z0-9]+)* ({Digit}+\.{Digit}*)|(\.{Digit}+) [Ee][\+\-]?{Digit}+ \\[tbnrf\"'] ([^\x27\x5C\x0A\x0D]|{Echar})* ([^\x22\x5C\x0A\x0D]|{Echar})* (\x27[0,2]([^\x27\x5C]|{Echar}))* (\x22{0,2}([^\x22\x5C]|{Echar}))* \({Ws}*\) \u0020|\u0009|\u000D|\u000A \[{Ws}*\] <([^<>'{}|^`\u0000-\u0020])*> [A-z]|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|[\u10000-\uEFFFF] {PN_Chars_U}|-|{Digit}|\u00B7|[\u0300-\u036F]|[\u0203F-\u2040] {PN_Chars_Base}|_ ({PN_Chars_U}|{Digit})(({PN_Chars} | \.)* {PN_Chars})? 
{PName_NS}{NCName_with_digits} ({NCName})?: _:{NCName} {NCChar1}|-|{Digit}|\u00B7|[\u0300-\u036F] | [\u0203F-\u2040] {NCChar1p}|_ [A-z]|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFFFD] {NCChar1p}|{NCChar1p}({NCChar}|\.)*{NCChar} [\t\n\r ]+ ({PN_Chars_U}|{Digit})(PN_Chars_U}|{Digit}|\u00B7|[\u0300-\u036F]|[\u0203F-\u2040])* rdflib-2.4.2/src/bison/SPARQLLiteralLexerPatterns.bgen.frag0000644000175000017500000001050211153616037022440 0ustar nachonacho UNION ASTERISK BASE PREFIX SELECT DISTINCT FROM NAMED OPTIONAL FILTER GRAPH WHERE ORDER BY ASC ASK CONSTRUCT DESCRIBE DESC LIMIT OFFSET STR LANG LANGMATCHES DATATYPE isIRI isURI isBLANK isLITERAL BOUND REGEX TRUE FALSE A MINUS PLUS DOUBLE_AMPERSAND DOUBLE_PIPE BANG DOUBLE_HAT COMMA COLON QUESTION_MARK DOLLAR FORWARDSLASH LEFT_PAREN RIGHT_PAREN LEFT_SQUARE RIGHT_SQUARE EQUALITY_OP NOT_EQUAL LESS_THAN GREATER_THAN LESS_THAN_EQUAL GREATER_THAN_EQUAL SEMICOLON LEFT_CURLY RIGHT_CURLY DOT rdflib-2.4.2/test/0000755000175000017500000000000011204354476012710 5ustar nachonachordflib-2.4.2/test/n3/0000755000175000017500000000000011204354476013230 5ustar nachonachordflib-2.4.2/test/n3/listTest.n30000644000175000017500000000011711153616024015275 0ustar nachonacho@prefix :. :gunnar :name ("Gunnar" "Aastrand" "Grimnes").rdflib-2.4.2/test/n3/longString.rdf0000644000175000017500000000050211153616024016041 0ustar nachonacho <_3:desc>This is a really long string, with newlines in it, and " quotes'. rdflib-2.4.2/test/n3/longString.n30000644000175000017500000000016711153616024015615 0ustar nachonacho@prefix : . :gunnar :desc "This is a really long string, with \n newlines in it, and \" quotes.".rdflib-2.4.2/test/n3/n3-writer-test-01.n30000644000175000017500000000007611153616024016513 0ustar nachonacho@prefix a: . @prefix : <> . # Empty model. 
rdflib-2.4.2/test/n3/n3-writer-test-03.n30000644000175000017500000000024611153616024016514 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Compound statements, same resource" . a:b a:p1 "123" ; a:p1 "456" . a:b a:p2 a:v1 ; a:p3 a:v2 . rdflib-2.4.2/test/n3/n3-writer-test-04.n30000644000175000017500000000026111153616024016512 0ustar nachonacho@prefix a: . @prefix b: . @prefix : <#> . :test :comment "Multiple values" . a:b a:p1 "123" ; a:p1 "456" . b:b a:p2 a:v1 ; a:p3 a:v2 . rdflib-2.4.2/test/n3/n3-writer-test-05.n30000644000175000017500000000013411153616024016512 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "BNode" . [ a:p a:v ] . rdflib-2.4.2/test/n3/n3-writer-test-06.n30000644000175000017500000000020511153616024016512 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "BNode as single object" . a:b a:oneRef [ a:pp "1" ; a:qq "2" ]. rdflib-2.4.2/test/n3/n3-writer-test-07.n30000644000175000017500000000025011153616024016513 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "BNode but shared" . a:b1 a:twoRef _:a . a:b2 a:twoRef _:a . _:a :pred [ a:pp "1" ; a:qq "2" ]. rdflib-2.4.2/test/n3/n3-writer-test-08.n30000644000175000017500000000030111153616024016511 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Not a bnode" . a:b1 a:twoRef . a:b2 a:twoRef . :p [ a:pp "1" ; a:qq "2" ]. rdflib-2.4.2/test/n3/n3-writer-test-09.n30000644000175000017500000000025511153616024016522 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Not a bnode. Ref once." . a:b1 a:oneRef . :p [ a:pp "1" ; a:qq "2" ]. rdflib-2.4.2/test/n3/n3-writer-test-10.n30000644000175000017500000000027711153616024016516 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Syntactic keywords". _:a => a:something . a:b a . a:this = a:that . rdflib-2.4.2/test/n3/n3-writer-test-11.n30000644000175000017500000000024311153616024016510 0ustar nachonacho@prefix a: . @prefix : <#> . 
:test :comment "RDF collections as lists: unattached lists" . ("1" "2" "3"). # This is not a statement. (). rdflib-2.4.2/test/n3/n3-writer-test-12.n30000644000175000017500000000022011153616024016504 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "RDF collections as lists". a:list3 a:p ("1" "2" "3"). a:list0 a:p (). rdflib-2.4.2/test/n3/n3-writer-test-13.n30000644000175000017500000000020411153616024016507 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "RDF collections as lists" . a:list0 a:p (). _:list0 a:p (). rdflib-2.4.2/test/n3/n3-writer-test-14.n30000644000175000017500000000021611153616024016513 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "RDF collections as lists: nil list with property". () a:prop "nilProp" . rdflib-2.4.2/test/n3/n3-writer-test-15.n30000644000175000017500000000105611153616024016517 0ustar nachonacho@prefix rdf: . @prefix a: . @prefix : <#> . :test :comment "RDF collections as lists : but with attached property so not well-formed lists" . # This is the list ("1" "2" "3" "4") _:a1 rdf:first "1" . _:a1 rdf:rest _:a2 . _:a2 rdf:first "2" . _:a2 rdf:rest _:a3 . _:a3 rdf:first "3" . _:a3 rdf:rest _:a4 . _:a4 rdf:first "4" . _:a4 rdf:rest rdf:nil . # This adds a property to the middle element. # Tail ("3" "4") should be pretty printed. _:a2 a:prop "value" . rdflib-2.4.2/test/n3/n3-writer-test-16.n30000644000175000017500000000036711153616024016524 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "RDF collections as lists : with compound items". a:a a:p ( [ a:p2 "v1" ] ("inner list") ) . a:p "value" . rdflib-2.4.2/test/n3/n3-writer-test-17.n30000644000175000017500000000070711153616024016523 0ustar nachonacho@prefix rdf: . @prefix a: . @prefix : <#> . :test :comment "Complex DAML List: duplicate properties". # This is the list ("1" "2" "3"). _:a1 rdf:first "1" . _:a1 rdf:rest _:a2 . _:a2 rdf:first "2" . _:a2 rdf:rest _:a3 . _:a3 rdf:first "3" . _:a3 rdf:rest () . 
# This adds a property to the middle element. # Could pretty print the tail ("3") _:a2 rdf:first "TWO" . rdflib-2.4.2/test/n3/n3-writer-test-18.n30000644000175000017500000000022611153616024016520 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Nested bNodes" . a:a a:p [ a:p2 [ a:p3 "v1" , "v2" ; a:p4 "v3" ] ; a:p5 "v4" ] . rdflib-2.4.2/test/n3/n3-writer-test-19.n30000644000175000017500000000024211153616024016517 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Shared RDF collections as lists" . :a :p _:list . :b :p _:list . _:list :p ("1" "2") . rdflib-2.4.2/test/n3/n3-writer-test-20.n30000644000175000017500000000027411153616024016514 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "Shared RDF lists tails" . :a :p :list . :b :p :list . :list :p ("1" "2") . ("a" "b") . :list2 :p ("3" "4") . rdflib-2.4.2/test/n3/n3-writer-test-21.n30000644000175000017500000000031311153616024016507 0ustar nachonacho# Test names with .'s in potential qnames. @prefix foo: . @prefix : <#> . :localname . rdflib-2.4.2/test/n3/n3-writer-test-22.n30000644000175000017500000000032011153616024016506 0ustar nachonacho# Test datatypes. @prefix xsd: . @prefix x: . @prefix : <#> . [ :prop1 "123" ; :prop1 "123"^^xsd:string ; :prop1 "123"^^xsd:integer ; ] . rdflib-2.4.2/test/n3/n3-writer-test-23.n30000644000175000017500000000015011153616024016510 0ustar nachonacho# Test lang tags @prefix : <#> . [ :prop "chat"@fr ; :prop "chat"@en ; :prop "chat" ; ] . rdflib-2.4.2/test/n3/n3-writer-test-24.n30000644000175000017500000000027511153616024016521 0ustar nachonacho# XML literals @prefix rdf: . @prefix x: . [ x:p1 "bar"^^rdf:XMLLiteral ; x:p2 "bar" ; ] . rdflib-2.4.2/test/n3/n3-writer-test-26.n30000644000175000017500000000221211153616024016514 0ustar nachonacho@prefix rdfs: . @prefix rdf: . @prefix joseki: . <> rdfs:comment """Taken from joseki configuration file - has long properties and nested objects.""" . 
## Server configuration a joseki:AttachedModel ; joseki:attachedModel ; joseki:hasQueryOperation joseki:BindingRDQL ; joseki:hasQueryOperation joseki:BindingGET ; joseki:hasQueryOperation joseki:BindingSPO ; # Allow query over POST joseki:hasOperation joseki:BindingQueryModel ; joseki:hasOperation joseki:BindingOptions ; joseki:hasQueryOperation [ a joseki:QueryLanguageBinding ; joseki:queryOperationName "fetch" ; joseki:queryOperation [ joseki:className "org.joseki.server.processors.QueryProcessorFetch" ; ] ] ; joseki:isImmutable "true" ; rdfs:comment "Deck of cards" ; . # Local Variables: # tab-width: 4 # indent-tabs-mode: nil # End: rdflib-2.4.2/test/n3/n3-writer-test-28.n30000644000175000017500000000042511153616024016522 0ustar nachonacho# Numbers with errors. # See also rdf-test-28.n3 @prefix xsd: . @prefix x: . @prefix : <#> . :y :p1 "xyz"^^xsd:integer . :y :p2 "12xyz"^^xsd:integer . :y :p5 "xy.z"^^xsd:double . :y :p6 "+1.0z"^^xsd:double . rdflib-2.4.2/test/n3/rdf-test-02.n30000644000175000017500000000024611153616024015434 0ustar nachonacho# Test compounds in anon nodes @prefix ns1: . @prefix ns2: . [ ns1:bp1 ns1:bv1 ; ns1:bp2 ns1:bv2 ] ns2:p1 ns2:v1. rdflib-2.4.2/test/n3/rdf-test-03.n30000644000175000017500000000011511153616024015430 0ustar nachonacho# Test anon nodes @prefix ns: . [] ns:p1 ns:v1. rdflib-2.4.2/test/n3/rdf-test-04.n30000644000175000017500000000023311153616024015432 0ustar nachonacho# Test shared bNode @prefix ns: . _:a ns:prop1 "anon1" . _:a ns:prop2 "anon2" . _:a ns:prop3 _:b . _:a ns:prop4 _:b . rdflib-2.4.2/test/n3/rdf-test-05.n30000644000175000017500000000011111153616024015426 0ustar nachonacho# Tests <> as a prefix and as a triple node @prefix : <> . <> a :a. rdflib-2.4.2/test/n3/rdf-test-06.n30000644000175000017500000000011411153616024015432 0ustar nachonacho# Tests <#> as a prefix and a triple node @prefix : <#> . <#> :p "v" . 
rdflib-2.4.2/test/n3/rdf-test-07.n30000644000175000017500000000017311153616024015440 0ustar nachonacho# Tests reverse properties @prefix : <#> . # Should be one statement. "value" is :prop of :b . :b :prop "value" . rdflib-2.4.2/test/n3/rdf-test-09.n30000644000175000017500000000012111153616024015433 0ustar nachonacho# Lists @prefix : <> . :a :p1 (). :a :p2 (). :a :p3 (:b "2"). rdflib-2.4.2/test/n3/rdf-test-11.n30000644000175000017500000000043111153616024015430 0ustar nachonacho# Test the syntactic items for abbreviation. # This is *not* testing rule application. @prefix : <#> . _:a => _:b . _:a <= _:b . _:a = _:b . # 'this' refers to the current formula so is not legal for RDF # this a "thing". <> a "thing too". <#> a "another thing". rdflib-2.4.2/test/n3/rdf-test-12.n30000644000175000017500000000016311153616024015433 0ustar nachonacho# Test defining _ as a prefix. @prefix : <#> . _:a a :p. @prefix _: . _:a a _:p. rdflib-2.4.2/test/n3/rdf-test-13.n30000644000175000017500000000017011153616024015432 0ustar nachonacho# Test redefining a prefix @prefix a: . a:b a:p a:v . @prefix a: . a:b a:p a:v . rdflib-2.4.2/test/n3/rdf-test-14.n30000644000175000017500000000016211153616024015434 0ustar nachonacho# Test the "is ... of" constuct @prefix : <#> . "value1" is :prop of :s . "value2" is :prop of :s1, :s2 . rdflib-2.4.2/test/n3/rdf-test-15.n30000644000175000017500000000053511153616024015441 0ustar nachonacho# Forward paths @prefix : <#>. :x1!:y1 :p1 "3" . :a1!:b1!:c1 :q1 "3" ; :q2 "4" , "5" . # NB Cwm is wrong on these (it can't cope with qnames of :a etc in paths). @prefix a: . :x2.a:y2 :p2 "3" . :a2.a:b2.a:c2 :q1 "3" ; :q2 "4" , "5" . # Object slot # cwm does not generate the right thing. :r :p :o.a:p1.a:p2 . rdflib-2.4.2/test/n3/rdf-test-16.n30000644000175000017500000000013411153616024015435 0ustar nachonacho# Backward paths @prefix : <#>. :x^:y :p "3" . :a^:b^:c :q1 "3" ; :q2 "4" , "5" . 
rdflib-2.4.2/test/n3/rdf-test-17.n30000644000175000017500000000054611153616024015445 0ustar nachonacho# Mixed paths and paths as objects @prefix : <#>. :a!:b^:c :q1 "1" ; :q2 "21" , "22" . :x^:y!:z :q3 "3" ; :q4 "41" , "42" . :d1!:e1!:f1. :d2^:e2^:f2. :d3^:e3!:f3. :d4!:e4^:f4. rdflib-2.4.2/test/n3/rdf-test-18.n30000644000175000017500000000010711153616024015437 0ustar nachonacho# Mixed lists and paths @prefix : <#>. :a :b ( :d1!:e1 :d2!:e2 ). rdflib-2.4.2/test/n3/rdf-test-19.n30000644000175000017500000000012611153616024015441 0ustar nachonacho# Named DAML lists @prefix : <#>. :a :p _:a . :b :p _:a . _:a :p ("1" "2") . rdflib-2.4.2/test/n3/rdf-test-20.n30000644000175000017500000000013411153616024015430 0ustar nachonacho# Named DAML lists @prefix : <#>. :a :p :list . :b :p :list . :list :p ("1" "2") . rdflib-2.4.2/test/n3/rdf-test-21.n30000644000175000017500000000062111153616024015432 0ustar nachonacho# Datatypes @prefix : <#> . @prefix xsd: . # No datatype. :a :p "11" . ## Datatype - same property, but datatyped => new statement :a :p "11"^^xsd:integer . # Only one statement here: :a :q "12"^^ . :a :q "12"^^xsd:integer . # Two statements :a :q2 "99"^^xsd:integer ; :q2 "99"^^xsd:string . rdflib-2.4.2/test/n3/rdf-test-22.n30000644000175000017500000000020011153616024015424 0ustar nachonacho# Language tags @prefix x: . [ x:q1 "UK-English"@en-uk ; x:q2 "French"@fr ; x:q3 "aaaa"@any ] . rdflib-2.4.2/test/n3/rdf-test-23.n30000644000175000017500000000033411153616024015435 0ustar nachonacho# XML literals @prefix : <#> . @prefix rdf: . # Two statements here # Plain string :a :q1 "bar" ; :q1 "bar"^^rdf:XMLLiteral ; . rdflib-2.4.2/test/n3/rdf-test-26.n30000644000175000017500000000017311153616024015441 0ustar nachonacho@prefix : <#> . # alpha :x :y1 "\u03B1" . # e-acute :x :y2 "\u00E9" . # Literal e-acute in text :x :y3 "é" . rdflib-2.4.2/test/n3/rdf-test-28.n30000644000175000017500000000035111153616024015441 0ustar nachonacho# Strange xsd:interges and xsd:doubles @prefix : <#> . 
@prefix rdf: . @prefix xsd: . :x :p1 "abc"^^xsd:integer . :x :p2 "ab.c"^^xsd:double . rdflib-2.4.2/test/test_datatype_parsing.pyc0000644000175000017500000000347211164177225020030 0ustar nachonachoÑò ¯Ic @sÓddkZddklZddklZlZlZlZlZddkl Z ddk l Z ddk l Z dZ edƒZed d e iƒZd eifd „ƒYZed joeiƒndS(iÿÿÿÿN(tpprint(tConjunctiveGraphtURIReftLiteraltRDFSt Namespace(t_XSD_NS(tStringIO(tSets¥ @prefix : . @prefix xsd: . :xi2 :p "1"^^xsd:integer . :xd3 :p "1"^^xsd:double . shttp://example.org/things#t1tdatatypetTestSparqlOPT_FILTERcBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtloadRt testContent(tself((sE/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_parsing.pytsetUps cCsng}|iidtidtiƒD] }||q&~}|i|ditijdt |dfƒdS(Ntsubjectt predicateisExpecting %r, got instead : %r( RtobjectstexNStxd3tpt failUnlessR Rtdoubletdouble1(Rt_[1]totxd3Objs((sE/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_parsing.pyttest_OPT_FILTERs<(t__name__t __module__RR(((sE/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_parsing.pyR s t__main__(tunittestRtrdflibRRRRRtrdflib.LiteralRRtsetsRRRRRtTestCaseR R tmain(((sE/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_parsing.pyts (  rdflib-2.4.2/test/parser_rdfcore.py0000644000175000017500000001407411153616026016263 0ustar nachonachoimport unittest from rdflib import URIRef, BNode, Literal, RDF, RDFS from rdflib.Namespace import Namespace from rdflib.exceptions import ParserError from rdflib.Graph import Graph from rdflib.util import first import logging _logger = logging.getLogger("parser_rdfcore") verbose = 0 from encodings.utf_8 import StreamWriter import sys sw = StreamWriter(sys.stdout) def write(msg): _logger.info(msg+"\n") #sw.write(msg+"\n") class TestStore(Graph): def __init__(self, expected): super(TestStore, self).__init__() self.expected = expected def add(self, (s, p, o)): if not isinstance(s, BNode) and not isinstance(o, BNode): if not (s, p, o) in self.expected: m = u"Triple not in expected result: %s, %s, 
%s" % (s.n3(), p.n3(), o.n3()) if verbose: write(m) #raise Exception(m) super(TestStore, self).add((s, p, o)) TEST = Namespace("http://www.w3.org/2000/10/rdf-tests/rdfcore/testSchema#") import os def resolve(rel): return "http://www.w3.org/2000/10/rdf-tests/rdfcore/" + rel def _testPositive(uri, manifest): if verbose: write(u"TESTING: %s" % uri) result = 0 # 1=failed, 0=passed inDoc = first(manifest.objects(uri, TEST["inputDocument"])) outDoc = first(manifest.objects(uri, TEST["outputDocument"])) expected = Graph() if outDoc[-3:]==".nt": format = "nt" else: format = "xml" expected.load(outDoc, format=format) store = TestStore(expected) if inDoc[-3:]==".nt": format = "nt" else: format = "xml" try: store.load(inDoc, format=format) except ParserError, pe: write("Failed '") write(inDoc) write("' failed with") raise pe try: write(type(pe)) except: write("sorry could not dump out error.") result = 1 else: if not store.isomorphic(expected): write(u"""Failed: '%s'""" % uri) if verbose: write(""" In:\n""") for s, p, o in store: write("%s %s %s." % (repr(s), repr(p), repr(o))) write(""" Out:\n""") for s, p, o in expected: write("%s %s %s." 
% (repr(s), repr(p), repr(o))) result += 1 return result def _testNegative(uri, manifest): if verbose: write(u"TESTING: %s" % uri) result = 0 # 1=failed, 0=passed inDoc = first(manifest.objects(uri, TEST["inputDocument"])) store = Graph() test = BNode() results.add((test, RESULT["test"], uri)) results.add((test, RESULT["system"], system)) try: if inDoc[-3:]==".nt": format = "nt" else: format = "xml" store.load(inDoc, format=format) except ParserError, pe: results.add((test, RDF.type, RESULT["PassingRun"])) #pass else: write(u"""Failed: '%s'""" % uri) results.add((test, RDF.type, RESULT["FailingRun"])) result = 1 return result class ParserTestCase(unittest.TestCase): store = 'default' path = 'store' slowtest = True def setUp(self): self.manifest = manifest = Graph(store=self.store) manifest.open(self.path) manifest.load("http://www.w3.org/2000/10/rdf-tests/rdfcore/Manifest.rdf") def tearDown(self): self.manifest.close() def testNegative(self): manifest = self.manifest num_failed = total = 0 negs = list(manifest.subjects(RDF.type, TEST["NegativeParserTest"])) negs.sort() for neg in negs: status = first(manifest.objects(neg, TEST["status"])) if status==Literal("APPROVED"): result = _testNegative(neg, manifest) total += 1 num_failed += result self.assertEquals(num_failed, 0, "Failed: %s of %s." % (num_failed, total)) def testPositive(self): manifest = self.manifest uris = list(manifest.subjects(RDF.type, TEST["PositiveParserTest"])) uris.sort() num_failed = total = 0 for uri in uris: status = first(manifest.objects(uri, TEST["status"])) if status==Literal("APPROVED"): result = _testPositive(uri, manifest) test = BNode() results.add((test, RESULT["test"], uri)) results.add((test, RESULT["system"], system)) if not result: results.add((test, RDF.type, RESULT["PassingRun"])) else: results.add((test, RDF.type, RESULT["FailingRun"])) total += 1 num_failed += result self.assertEquals(num_failed, 0, "Failed: %s of %s." 
% (num_failed, total)) RESULT = Namespace("http://www.w3.org/2002/03owlt/resultsOntology#") FOAF = Namespace("http://xmlns.com/foaf/0.1/") results = Graph() system = BNode("system") results.add((system, FOAF["homepage"], URIRef("http://rdflib.net/"))) results.add((system, RDFS.label, Literal("RDFLib"))) results.add((system, RDFS.comment, Literal(""))) if __name__ == "__main__": manifest = Graph() manifest.load("http://www.w3.org/2000/10/rdf-tests/rdfcore/Manifest.rdf") import sys, getopt try: optlist, args = getopt.getopt(sys.argv[1:], 'h:', ["help"]) except getopt.GetoptError, msg: write(msg) usage() try: argv = sys.argv for arg in sys.argv[1:]: verbose = 1 case = URIRef(arg) write(u"Testing: %s" % case) if (case, RDF.type, TEST["PositiveParserTest"]) in manifest: result = _testPositive(case, manifest) write(u"Positive test %s" % ["PASSED", "FAILED"][result]) elif (case, RDF.type, TEST["NegativeParserTest"]) in manifest: result = _testNegative(case, manifest) write(u"Negative test %s" % ["PASSED", "FAILED"][result]) else: write(u"%s not ??" % case) if len(argv)<=1: unittest.main() finally: results.serialize("results.rdf") rdflib-2.4.2/test/a.n30000644000175000017500000000006111153616026013362 0ustar nachonacho:a :b :c. :d :e :f. {?x ?y ?z} => {?z ?y ?x}. rdflib-2.4.2/test/test_sparql_filters.pyc0000644000175000017500000000342611164177226017524 0ustar nachonachoÑò ¯Ic@s|ddklZlZlZddklZdZeƒZeieeƒddƒedƒZdZ d„Z d „Z d S( iÿÿÿÿ(tConjunctiveGraphtURIReftLiteral(tStringIOs% @prefix rdfs: . rdfs:label "Document 1"@en, "Dokument 1"@sv . rdfs:label "Document 2"@en, "Dokument 2"@sv . rdfs:label "Document 3"@en, "Dokument 3"@sv . tformattn3shttp://example.org/doc/1s: PREFIX rdfs: c cs“ddg}tdtiƒd}xh|D]`\}}ti||ƒ}g}|iD]}||iƒqX~}|g}t||fVq+WdS( Ntenu"Document 1"@entsvu"Dokument 1"@svs+ SELECT ?label WHERE { sK rdfs:label ?label . 
FILTER(LANG(?label) = "%s") } (senu"Document 1"@en(ssvu"Dokument 1"@sv(tPROLOGUEtdoc1Rtgraphtquerytselectedt assert_equal( ttestdataR tlangtliteraltrest_[1]tbindingtactualtexpected((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_filters.pyttest_filter_by_langs  * cCs%||jptd||f‚dS(NsExpected %r == %s(tAssertionError(tv1tv2((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_filters.pyR +sN( trdflibRRRRt testContentR tloadR RRR (((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_filters.pyts    rdflib-2.4.2/test/rdfa/0000755000175000017500000000000011204354476013624 5ustar nachonachordflib-2.4.2/test/rdfa/000001.htm0000644000175000017500000000357511153616022015057 0ustar nachonacho Ben Adida's Card

Ben Adida, FOAF Card

This page contains my contact information and contacts.
Ben Adida (call me Ben)
homepage | photo

32 Vassar Street
MIT CSAIL Room 32G-694
Cambridge 02139
USA

People I know

rdflib-2.4.2/test/rdfa/000001.ttl0000644000175000017500000000307611153616022015066 0ustar nachonacho<#i> "Ben"^^ . <#i> "Adida"^^ . <#i> "Ben"^^ . <#i> . <#i> . <#i> _:office . _:office _:address . _:address "32 Vassar Street"^^ . _:address "MIT CSAIL Room 32G-694"^^ . _:address "Cambridge"^^ . _:address "02139"^^ . _:address "USA"^^ . <#i> . <#i> . <#i> . <#i> . <> . rdflib-2.4.2/test/rdfa/000002.htm0000644000175000017500000000110611153616022015044 0ustar nachonacho This photo was taken by Mark Birbeck. rdflib-2.4.2/test/rdfa/000002.ttl0000644000175000017500000000020111153616022015052 0ustar nachonacho "Mark Birbeck"^^ . rdflib-2.4.2/test/rdfa/000003.htm0000644000175000017500000000117211153616022015050 0ustar nachonacho
This photo was taken by Mark Birbeck.
rdflib-2.4.2/test/rdfa/000003.ttl0000644000175000017500000000020111153616022015053 0ustar nachonacho "Mark Birbeck"^^ . rdflib-2.4.2/test/rdfa/000004.htm0000644000175000017500000000122111153616022015044 0ustar nachonacho
This photo was taken by Mark Birbeck .
rdflib-2.4.2/test/rdfa/000004.ttl0000644000175000017500000000017411153616022015065 0ustar nachonacho_:span0 "Mark Birbeck"^^ . rdflib-2.4.2/test/rdfa/000005.htm0000644000175000017500000000126711153616022015057 0ustar nachonacho rdflib-2.4.2/test/rdfa/000005.ttl0000644000175000017500000000034111153616022015062 0ustar nachonacho . "Internet Applications" . rdflib-2.4.2/test/rdfa/000006.htm0000644000175000017500000000123111153616022015047 0ustar nachonacho This document is licensed under a Creative Commons License. rdflib-2.4.2/test/rdfa/000006.ttl0000644000175000017500000000013611153616022015065 0ustar nachonacho<> . rdflib-2.4.2/test/rdfa/000007.htm0000644000175000017500000000117111153616022015053 0ustar nachonacho This photo was taken by Mark Birbeck. rdflib-2.4.2/test/rdfa/000007.ttl0000644000175000017500000000107111153616022015065 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . dc:creator . foaf:img . rdflib-2.4.2/test/rdfa/000008.htm0000644000175000017500000000125611153616022015060 0ustar nachonacho This photo was taken by Mark Birbeck. rdflib-2.4.2/test/rdfa/000008.ttl0000644000175000017500000000114411153616022015067 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . dc:creator . foaf:img . dc:title "Portrait of Mark" . rdflib-2.4.2/test/rdfa/000009.htm0000644000175000017500000000273411153616022015063 0ustar nachonacho Mark's Publications

Tags

Standards
XForms

Publications

A Standards-Based Virtual Machine
XForms and Internet Applications
rdflib-2.4.2/test/rdfa/000009.ttl0000644000175000017500000000221111153616022015064 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . @prefix biblio: . @prefix taxo: . <#tag_standards> rdf:type taxo:topic . <#tag_xforms> rdf:type taxo:topic . <#publication_1> rdf:type biblio:Publication . <#publication_1> dc:creator . <#publication_1> dc:title "A Standards-Based Virtual Machine"^^rdf:XMLLiteral . <#publication_1> taxo:topics <#tag_standards> . <#publication_2> rdf:type biblio:Publication . <#publication_2> dc:creator . <#publication_2> dc:title "XForms and Internet Applications"^^rdf:XMLLiteral . <#publication_2> taxo:topics <#tag_standards> . <#publication_2> taxo:topics <#tag_xforms> . rdflib-2.4.2/test/rdfa/000010.htm0000644000175000017500000000122411153616022015044 0ustar nachonacho This document is licensed under a Creative Commons . rdflib-2.4.2/test/rdfa/000010.ttl0000644000175000017500000000077111153616022015065 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . <> cc:license . rdflib-2.4.2/test/rdfa/000011.htm0000644000175000017500000000115211153616022015045 0ustar nachonacho rdflib-2.4.2/test/rdfa/000011.ttl0000644000175000017500000000101611153616022015057 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . foaf:knows . rdflib-2.4.2/test/rdfa/000012.htm0000644000175000017500000000117411153616022015052 0ustar nachonacho rdflib-2.4.2/test/rdfa/000012.ttl0000644000175000017500000000114511153616022015063 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . foaf:knows . foaf:knows . 
rdflib-2.4.2/test/rdfa/000013.htm0000644000175000017500000000123511153616022015051 0ustar nachonacho Author: Albert Einstein

E = mc2: The Most Urgent Problem of Our Time

rdflib-2.4.2/test/rdfa/000013.ttl0000644000175000017500000000107711153616022015070 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . <> dc:title "E = mc2: The Most Urgent Problem of Our Time"^^rdf:XMLLiteral . <> dc:creator "Albert Einstein"^^rdf:XMLLiteral . rdflib-2.4.2/test/rdfa/000014.htm0000644000175000017500000000114011153616022015045 0ustar nachonacho rdflib-2.4.2/test/rdfa/000014.ttl0000644000175000017500000000077111153616022015071 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . "chat"@fr . rdflib-2.4.2/test/rdfa/000015.htm0000644000175000017500000000121511153616022015051 0ustar nachonacho Example Title rdflib-2.4.2/test/rdfa/000015.ttl0000644000175000017500000000077111153616022015072 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . "chat"@fr . rdflib-2.4.2/test/rdfa/000016.htm0000644000175000017500000000114011153616022015047 0ustar nachonacho ten rdflib-2.4.2/test/rdfa/000016.ttl0000644000175000017500000000103211153616022015062 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . "10"^^ . rdflib-2.4.2/test/rdfa/000017.htm0000644000175000017500000000144211153616022015055 0ustar nachonacho

Rodion Romanovitch! My dear friend! If you go on in this way you will go mad, I am positive! Drink, pray, if only a few drops!

rdflib-2.4.2/test/rdfa/000017.ttl0000644000175000017500000000100011153616022015056 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . _:a dc:source . _:a dc:creator "Fyodor Dostoevsky" . rdflib-2.4.2/test/rdfa/000018.htm0000644000175000017500000000113611153616022015056 0ustar nachonacho ten rdflib-2.4.2/test/rdfa/000018.ttl0000644000175000017500000000075611153616022015100 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . "10" . rdflib-2.4.2/test/rdfa/000019.htm0000644000175000017500000000152011153616022015054 0ustar nachonacho

Rodion Romanovitch! My dear friend! If you go on in this way you will go mad, I am positive! Drink, pray, if only a few drops!

rdflib-2.4.2/test/rdfa/000019.ttl0000644000175000017500000000100011153616022015060 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . _:a dc:source . _:a dc:creator "Fyodor Dostoevsky" . rdflib-2.4.2/test/rdfa/000020.htm0000644000175000017500000000133211153616022015045 0ustar nachonacho rdflib-2.4.2/test/rdfa/000020.ttl0000644000175000017500000000106511153616022015063 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . _:a foaf:mbox . _:b foaf:mbox . _:a foaf:knows _:b . rdflib-2.4.2/test/rdfa/000021.htm0000644000175000017500000000150611153616022015051 0ustar nachonacho
This document is licensed under a Creative Commons License which, among other things, requires that you provide attribution to the author, Ben Adida.
rdflib-2.4.2/test/rdfa/000021.ttl0000644000175000017500000000103511153616022015061 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . <> cc:license . <> dc:creator . rdflib-2.4.2/test/rdfa/000022.htm0000644000175000017500000000207311153616022015052 0ustar nachonacho Dan's home page
Dan is located at latitude 51.47026 and longitude -2.59466

Dan Brickley

rdflib-2.4.2/test/rdfa/000022.ttl0000644000175000017500000000137211153616022015066 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . @prefix geo: . <> dc:title "Dan's home page"^^rdf:XMLLiteral . _:geolocation geo:lat "51.47026" . _:geolocation geo:long "-2.59466" . <#person> rdf:type foaf:Person . <#person> foaf:homepage <> . <#person> foaf:based_near _:geolocation . <#person> foaf:name "Dan Brickley"^^rdf:XMLLiteral . rdflib-2.4.2/test/rdfa/000023.htm0000644000175000017500000000116411153616022015053 0ustar nachonacho Mark Birbeck . rdflib-2.4.2/test/rdfa/000023.ttl0000644000175000017500000000076711153616022015076 0ustar nachonacho@prefix cc: . @prefix dc: . @prefix ex: . @prefix foaf: . @prefix rdf: . @prefix rdfs: . @prefix svg: . @prefix xh11: . @prefix xsd: . dc:creator "Mark Birbeck"^^xsd:string . rdflib-2.4.2/test/trix/0000755000175000017500000000000011204354476013676 5ustar nachonachordflib-2.4.2/test/trix/aperture.trix0000644000175000017500000001233611153616024016433 0ustar nachonacho file:/home/grimnes/tmp/aperture/ file:/home/grimnes/tmp/aperture/ http://aperture.semanticdesktop.org/ontology/data#date 2006-10-04T15:16:17 file:/home/grimnes/tmp/aperture/ http://aperture.semanticdesktop.org/ontology/data#name aperture file:/home/grimnes/tmp/aperture/ http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/ file:/home/grimnes/tmp/aperture/file1 http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/ file:/home/grimnes/tmp/aperture/test/ http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/ file:/home/grimnes/tmp/aperture/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#DataObject file:/home/grimnes/tmp/aperture/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type 
http://aperture.semanticdesktop.org/ontology/data#FolderDataObject file:/home/grimnes/tmp/aperture/ http://aperture.semanticdesktop.org/ontology/data#rootFolderOf source:testSource file:/home/grimnes/tmp/aperture/file1 file:/home/grimnes/tmp/aperture/file1 http://aperture.semanticdesktop.org/ontology/data#date 2006-10-04T15:16:15 file:/home/grimnes/tmp/aperture/file1 http://aperture.semanticdesktop.org/ontology/data#name file1 file:/home/grimnes/tmp/aperture/file1 http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/ file:/home/grimnes/tmp/aperture/file1 http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#DataObject file:/home/grimnes/tmp/aperture/file1 http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#FileDataObject file:/home/grimnes/tmp/aperture/test/ file:/home/grimnes/tmp/aperture/test/ http://aperture.semanticdesktop.org/ontology/data#date 2006-10-04T15:16:20 file:/home/grimnes/tmp/aperture/test/ http://aperture.semanticdesktop.org/ontology/data#name test file:/home/grimnes/tmp/aperture/test/ http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/ file:/home/grimnes/tmp/aperture/test/file2 http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/test/ file:/home/grimnes/tmp/aperture/test/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#DataObject file:/home/grimnes/tmp/aperture/test/ http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#FolderDataObject file:/home/grimnes/tmp/aperture/test/file2 file:/home/grimnes/tmp/aperture/test/file2 http://aperture.semanticdesktop.org/ontology/data#date 2006-10-04T15:16:20 file:/home/grimnes/tmp/aperture/test/file2 http://aperture.semanticdesktop.org/ontology/data#name file2 file:/home/grimnes/tmp/aperture/test/file2 
http://aperture.semanticdesktop.org/ontology/data#partOf file:/home/grimnes/tmp/aperture/test/ file:/home/grimnes/tmp/aperture/test/file2 http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#DataObject file:/home/grimnes/tmp/aperture/test/file2 http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://aperture.semanticdesktop.org/ontology/data#FileDataObject rdflib-2.4.2/test/trix/nokia_example.trix0000644000175000017500000001134611153616024017420 0ustar nachonacho http://example.org/Bob http://example.org/wife http://example.org/Mary http://example.org/Bob http://example.org/wife Bob http://example.org/Mary http://example.org/age 32 http://example.org/Widget http://example.org/dimensions x x http://example.org/length 12 x http://example.org/width 46 http://example.org/binfo http://example.org/aBook http://purl.org/dc/elements/1.1/title <ex:title>A Good Book</ex:title> http://example.org/aBook http://www.w3.org/2000/01/rdf-schema#comment This is a really good book! 
binfo http://example.org/source http://example.org/book-description.rdf http://example.org/book-description.rdf http://example.org/authority http://example.org/Bob http://example.org/tests/language-tag-case-not-significant http://example.org/entailmentRules http://www.w3.org/1999/02/22-rdf-syntax-ns# http://example.org/tests/language-tag-case-not-significant http://example.org/premise http://example.org/tests/graph1 http://example.org/tests/language-tag-case-not-significant http://example.org/conclusion http://example.org/tests/graph2 http://example.org/tests/graph1 x http://example.org/property a http://example.org/tests/graph2 x http://example.org/property a http://example.org/rules/rdf1 http://example.org/premise http://example.org/rules/rdf1/premise http://example.org/rules/rdf1 http://example.org/conclusion http://example.org/rules/rdf1/conclusion http://example.org/rules/rdf1/premise http://example.org/rules/rdf1/variables/aaa http://example.org/rules/rdf1/variables/uuu http://example.org/rules/rdf1/variables/yyy http://example.org/rules/rdf1/conclusion http://example.org/rules/rdf1/variables/aaa http://www.w3.org/1999/02/22-rdf-syntax-ns#type http://www.w3.org/1999/02/22-rdf-syntax-ns#Property rdflib-2.4.2/test/type_check.py0000644000175000017500000000175311153616026015401 0ustar nachonachoimport unittest from rdflib.Graph import Graph from rdflib.exceptions import SubjectTypeError from rdflib.exceptions import PredicateTypeError from rdflib.exceptions import ObjectTypeError from rdflib.URIRef import URIRef foo = URIRef("foo") class TypeCheckCase(unittest.TestCase): unstable = True # TODO: until we decide if we want to add type checking back to rdflib backend = 'default' path = 'store' def setUp(self): self.store = Graph(backend=self.backend) self.store.open(self.path) def tearDown(self): self.store.close() def testSubjectTypeCheck(self): self.assertRaises(SubjectTypeError, self.store.add, (None, foo, foo)) def testPredicateTypeCheck(self): 
self.assertRaises(PredicateTypeError, self.store.add, (foo, None, foo)) def testObjectTypeCheck(self): self.assertRaises(ObjectTypeError, self.store.add, (foo, foo, None)) rdflib-2.4.2/test/aggregate_graphs.pyc0000644000175000017500000001377711164176136016736 0ustar nachonachoÑò ¯Ic @s$ddkZddkZddklZddklZlZlZlZddk l Z ddk l Z ddk lZlZlZddklZdZd Zd Zd Zd Zd Zdeifd„ƒYZdeifd„ƒYZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿN(t Namespace(tplugintRDFtRDFStURIRef(tStore(tStringIO(tGraphtReadOnlyGraphAggregatetConjunctiveGraph(tpprints» @prefix rdf: . @prefix rdfs: . @prefix : . :foo a rdfs:Class. :bar :d :c. :a :d :c. s @prefix rdf: . @prefix rdfs: . @prefix : . @prefix log: . :foo a rdfs:Resource. :bar rdfs:isDefinedBy [ a log:Formula ]. :a :d :e. sÛ @prefix rdf: . @prefix rdfs: . @prefix log: . @prefix : . <> a log:N3Document. s PREFIX rdfs: SELECT * FROM NAMED FROM NAMED FROM NAMED FROM WHERE {?sub ?pred rdfs:Class }so PREFIX rdfs: SELECT ?class WHERE { GRAPH ?graph { ?member a ?class } }s— PREFIX rdfs: PREFIX log: SELECT ?n3Doc WHERE {?n3Doc a log:N3Document }tGraphAggregates1cBseZd„Zd„ZRS(cCs»tidtƒƒ}t|ƒ|_t|ƒ|_t|ƒ|_xQt|ift|ift |ifgD]%\}}|i t |ƒddƒqmWt |i|i|igƒ|_ dS(NtIOMemorytformattn3(RtgetRRtgraph1tgraph2tgraph3t testGraph1N3t testGraph2N3t testGraph3N3tparseRRtG(tselftmemStoretn3Strtgraph((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pytsetUp?s  cCs|tt|iidtidfƒƒƒdjpt‚tt|iitdƒddfƒƒƒdjpt‚tt|iidtdƒdfƒƒƒdjpt‚t|iƒdjpt‚x.|ii ƒD]}t |t ƒpt‚qÛWtdƒtit i f|ijpt‚tdƒt ig}tt|iitdƒ|dfƒƒƒdjpt‚dS(Nishttp://test/baris http://test/diishttp://test/foo(tlentlistRttriplestNoneRttypetAssertionErrorRtcontextst isinstanceRRtResourcet isDefinedByttriples_choices(Rtgt barPredicates((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyttestAggregateRawLs8;;,(t__name__t __module__RR*(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyR >s tGraphAggregates2cBseZd„Zd„ZRS(cCsìtidtƒƒ}t|tdƒƒ|_t|tdƒƒ|_t|tdƒƒ|_xQt|ift |ift |ifgD]%\}}|i t |ƒddƒqˆWt|t iƒ|_|ii t iƒt|ƒ|_dS(NR 
shttp://example.com/graph1shttp://example.com/graph2shttp://example.com/graph3R R(RRRRRRRRRRRRRRtRDFSNStgraph4R R(RRRR((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyR`s  cCs¢tGH|iitƒ}t|ƒdjpt‚tdƒ}|iitdhtdƒd6ƒ}|idƒd|i jptt t |idƒƒƒ‚dS(Niu#http://www.w3.org/2000/10/swap/log#t initBindingsshttp://example.com/graph3u?graphtpythoni( tsparqlQRtqueryRR"RtsparqlQ2Rt serializet N3DocumenttreprR(RtrttLOG_NS((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyttestAggregateSPARQLos  %(R+R,RR:(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyR-_s tGraphAggregates3cBseZd„Zd„ZRS(cCsÄtidtƒƒ}t|tdƒƒ|_t|tdƒƒ|_t|tdƒƒ|_xQt|ift |ift |ifgD]%\}}|i t |ƒddƒqˆWt |ƒ|_dS(NR RRRR R(RRRRRRRRRRRRRR R(RRRR((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyRzs  cCs?|iitƒp td‚|iitƒ p td‚dS(Ns(CG as default graph should *all* triplessEGraph as default graph should *not* include triples from other graphs(RR3tsparqlQ3R"R(R((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyttestDefaultGraph†s(R+R,RR=(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyR;ys t__main__(tsystunittesttrdflib.NamespaceRtrdflibRRRRt rdflib.storeRt cStringIORt rdflib.GraphRRR R RRRR2R4R<tTestCaseR R-R;R+tmain(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/aggregate_graphs.pyts$  "   ! rdflib-2.4.2/test/n3_2.pyc0000644000175000017500000000607111164176136014172 0ustar nachonachoÑò ¯Ic @sŸddklZlZlZlZddklZlZddklZddkl Z l Z ddk Z ddk l Z edƒZ dZd dd „ZdS( iÿÿÿÿ(tURIReftBNodetLiteraltVariable(tRDFtRDFS(tStringInputSource(t QuotedGraphtConjunctiveGraphN(tpprints*http://www.w3.org/2000/10/swap/log#impliessÓ @prefix rdf: . @prefix rdfs: . @prefix : . {:a :b :c;a :foo} => {:a :d :c,?y}. _:foo a rdfs:Class. 
:a :d :c.tdefaultc CsÌtd|ƒ}|o|i|ƒ|i|ƒn|ittƒddƒ|iGHyVx5|idt dfƒD]\}}}|}|}qqWt |ƒt jot |ƒt jpt ‚t dƒ}t dƒ} t dƒ} t dƒ} tdƒ} t|iƒ} tt| i|t |fƒƒƒd jpt ‚tt|idd| fƒƒƒd jpt ‚xe|id| dfƒD]K\}}}|| jo/t|tƒpt ‚|| jpt ‚q…q…Wt| iti tiƒƒd }t|tƒpt ‚tt| idt dfƒƒƒd jpt ‚tt| idti dfƒƒƒd jpt ‚tt|idti dfƒƒƒd jpt ‚tt|id ƒƒƒd jpt ‚tt|idƒƒƒd jpt ‚tt| idƒƒƒd jpt ‚tt|idt dƒdfƒƒƒd jpt ‚tt| idt dƒdfƒƒƒd jpt ‚tt| i|| | fƒƒƒd jpt ‚| idt dfƒtt| idt dfƒƒƒd jpt ‚tt|idƒƒƒd jpt ‚tt|idƒƒƒd jpt ‚|id| dfƒtt|idƒƒƒd jpt ‚|idti dfƒtt|idƒƒƒd jpt ‚| idti tifƒ| i|ƒtt| idti dfƒƒƒd jpt ‚t| ƒd jpt ‚t|ƒd jpt ‚| idƒt| ƒd jpt ‚|ii|ƒWn|ii|ƒ‚nXdS(Ntstoretformattn3s http://test/as http://test/bs http://test/cs http://test/dtyiiii(NNN(NNN(NNN(NNN(NNN(NNN(NNN(NNN(RtdestroytopentparseRttestN3R ttriplestNonetimpliesttypeRtAssertionErrorRRtlentlistt isinstancetsubjectsRRtClassRtcontextstremovetremove_context(R t configStringtgtstptotformulaAtformulaBtatbtctdtvtuniverse((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3_2.pyt testN3Storesn  -     22 "255)))8822)))) 5 (trdflibRRRRRRRt rdflib.GraphRRtsysR RRRR-(((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3_2.pyts"  rdflib-2.4.2/test/sparql_limit.py0000644000175000017500000000161511153616026015760 0ustar nachonachofrom rdflib import ConjunctiveGraph, plugin from rdflib.store import Store from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . """ test_query = """ PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . 
} LIMIT 2 """ class TestLimit(unittest.TestCase): def testLimit(self): graph = ConjunctiveGraph(plugin.get('IOMemory',Store)()) graph.parse(StringIO(test_data), format="n3") results = graph.query(test_query) self.failUnless(len(results) == 2) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/sparql_regex.py0000644000175000017500000000173711153616026015761 0ustar nachonachofrom rdflib import ConjunctiveGraph, plugin from rdflib.store import Store from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . """ test_query = """ PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . FILTER regex(?name, "a", "i") } """ class TestRegex(unittest.TestCase): def testRegex(self): graph = ConjunctiveGraph(plugin.get('IOMemory',Store)()) graph.parse(StringIO(test_data), format="n3") results = graph.query(test_query) self.failUnless(len([a for a in results if 'a' in a[0] or 'A' in a[0]]) == 3) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/rdf.pyc0000644000175000017500000000525711164176136014211 0ustar nachonachoÑò ¯Ic@sŸddkZddkTddklZddklZddklZedƒZdZ dei fd „ƒYZ d „Z e d joeid d ƒndS(iÿÿÿÿN(t*(tGraph(tRDF(tStringInputSourceshttp://xmlns.com/foaf/0.1/s¦ Donna Fales donna t RDFTestCasecBs8eZdZdZd„Zd„Zd„Zd„ZRS(tdefaulttstorecCsRtd|iƒ|_|ii|iƒ|iiddƒ|iiddƒdS(NRtdcs'http://http://purl.org/dc/elements/1.1/tfoafshttp://xmlns.com/foaf/0.1/(RtbackendRtopentpathtbind(tself((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pytsetUpscCs|iiƒdS(N(Rtclose(R ((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pyttearDown$scCsztƒ|_}|ii|titdfƒ|ii|tdtdƒfƒ|ii|tdtdƒfƒdS(NtPersontnicktdonnatnames Donna Fales(tBNodeRRtaddRttypetFOAFtLiteral(R R((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pytaddDonna's #cCsU|iƒtƒ}|it|iiddƒƒƒ|i|ii|ƒtƒdS(Ntformats pretty-xml( RRtparseRRt serializet assertEqualst isomorphictTrue(R 
tg((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pyt testRDFXML-s  "(t__name__t __module__R R RRRR"(((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pyRs    cCs titƒS(N(tunittestt makeSuiteR(((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pyt test_suite3st__main__t defaultTestR'(R%trdflibt rdflib.GraphRRtrdflib.StringInputSourceRt NamespaceRtrdfxmltTestCaseRR'R#tmain(((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdf.pyts     rdflib-2.4.2/test/rdfdiff.pyc0000644000175000017500000001114511164176136015033 0ustar nachonachoÑò ¯Ic@s¥dZddkZddkZddkZddkZddklZeidƒe_defd„ƒYZ d„Z d„Z d „Z e d jo e ƒndS( sÑ RDF Graph Isomorphism Tester Author: Sean B. Palmer, inamidst.com Uses the pyrple algorithm Requirements: Python2.4+ http://inamidst.com/proj/rdf/ntriples.py Usage: ./rdfdiff.py iÿÿÿÿN(tbNodes <([^\s"<>]+)>tGraphcBsYeZddd„Zd„Zd„Zd„Zd„Zed„Z d„Z d„Z RS( cCs@tƒ|_|o|i|ƒn|o|i|ƒndS(N(tsetttriplestparset parse_string(tselfturitcontent((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyt__init__s  cs[dtf‡fd†ƒY}tid|ƒƒ}ti|ƒ}|i|ƒ|iƒdS(NtSinkcseZ‡fd†ZRS(csˆii|||fƒdS(N(Rtadd(tsinktstpto(R(s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyttriples(t__name__t __module__R((R(s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR sR (tobjecttntriplestNTriplesParserturllibturlopenRtclose(RRR Rtu((Rs7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyRs  csBdtf‡fd†ƒY}tid|ƒƒ}|i|ƒdS(NR cseZ‡fd†ZRS(csˆii|||fƒdS(N(RR (R R RR(R(s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR&s(RRR((R(s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR %sR (RRRt parsestring(RRR R((Rs7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR$scCsttt|iƒƒƒƒS(N(thashttupletsortedt hashtriples(R((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyt__hash__,sc#s?x8ˆiD]-}‡fd†|Dƒ}tt|ƒƒVq WdS(Nc3s9x2|]+}t|tƒoˆi|ƒp|VqWdS(N(t isinstanceRtvhash(t.0tt(R(s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pys 1s 
(RRR(RRtg((Rs7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR/s cCstt|i||ƒƒƒS(N(RRt vhashtriples(Rttermtdone((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR!4sccsCx<|iD]1}||jot|i|||ƒƒVq q WdS(N(RRt vhashtriple(RR&R'R#((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR%7s  ccsuxntdƒD]`}t||tƒp ||Vq |p|||jo |Vq |i||dtƒVq WdS(NiR'(txrangeR RR!tTrue(RRR&R'R((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyR(;s   N( RRtNoneR RRRRtFalseR!R%R((((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyRs     cCs"tt|ƒƒtt|ƒƒjS(N(RR(Rtq((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pytcompareAscCs(ttd|ƒƒttd|ƒƒjS(NR(RR(RR-((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pytcompare_from_stringDscCs*ttidtidƒ}d|GHdS(Niitnotyes(snosyes(R.tsystargv(tresult((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pytmainGst__main__(t__doc__R2treRRRtcompiletr_urirefRRR.R/R5R(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfdiff.pyt s$ /    rdflib-2.4.2/test/rdfa.py0000644000175000017500000000542611153616026014200 0ustar nachonacho#!/usr/bin/python # # test.py - RDFa Test Suite # import os, sys, string import rdfdiff import unittest import ntriples from rdfdiff import Graph from rdflib import ConjunctiveGraph as RGraph from rdflib import StringInputSource from rdflib import URIRef from rdflib import BNode from rdflib import Literal def main(): suite = unittest.TestSuite() for test in make_cases(): suite.addTest(test) print "\n------\nRDFa Parser Tests\n-----\n" unittest.TextTestRunner(verbosity=2,descriptions=1).run(suite) def make_cases(): testdir = "test/rdfa" verbose = False tests = [os.path.splitext(f)[0] for f in os.listdir(testdir) if os.path.splitext(f)[1] == ".htm" ] tests.sort() for testname in tests: yield RDFaTestStub(os.path.abspath(os.path.join(testdir,testname))) # expose each test for e.g. 
Nose to run def all_tests(): for test in make_cases(): yield test.runTest, all_tests.unstable = False class RDFaTestStub(unittest.TestCase): def __init__(self, testbase): unittest.TestCase.__init__(self) self.testbase = testbase self.pubId = 'http://example.com/' def shortDescription(self): return str(os.path.basename(self.testbase)) def nodeToString(self, node): if isinstance(node, BNode): bid = node.n3() if(bid[0:4] == '_:_:'): bid = bid[2:] return ntriples.bNode(str(bid)) elif isinstance(node, URIRef): if len(str(node)) == 0: return ntriples.URI(self.pubId) return ntriples.URI(str(node)) elif isinstance(node, Literal): return ntriples.Literal(str(node), lang= node.language or None, dtype= node.datatype or None) else: raise Exception("unexpected node value") def runTest(self): testfile = self.testbase + ".htm" resultsf = self.testbase + ".ttl" self.failIf(not os.path.isfile(resultsf), "missing expected results file.") store1 = RGraph() store1.load(resultsf, publicID=self.pubId, format="n3") pcontents = store1.serialize(format='nt') pg = Graph() for a, b, c in store1: pg.triples.add(tuple(map(self.nodeToString, (a,b,c)))) #print tuple(map(self.nodeToString, (a,b,c))) store2 = RGraph() store2.load(testfile, publicID=self.pubId, format="rdfa") qcontents = store2.serialize(format='nt') qg = Graph() for a, b, c in store2: qg.triples.add(tuple(map(self.nodeToString, (a,b,c)))) self.failIf(not hash(pg) == hash(qg), "In %s: results do not match.\n%s\n\n%s" % (self.shortDescription(), pcontents, qcontents)) if __name__ == '__main__': main() rdflib-2.4.2/test/triple_store.py0000644000175000017500000000143611153616026015774 0ustar nachonachoimport unittest from rdflib import URIRef, BNode, Literal, RDFS from rdflib.Graph import Graph class GraphTest(unittest.TestCase): backend = 'default' path = 'store' def setUp(self): self.store = Graph(store=self.backend) self.store.open(self.path) self.remove_me = (BNode(), RDFS.label, Literal("remove_me")) 
self.store.add(self.remove_me) def tearDown(self): self.store.close() def testAdd(self): subject = BNode() self.store.add((subject, RDFS.label, Literal("foo"))) def testRemove(self): self.store.remove(self.remove_me) self.store.remove((None, None, None)) def testTriples(self): for s, p, o in self.store: pass if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/sparql_parser_instability.py0000644000175000017500000000041311153616026020544 0ustar nachonachoBAD_SPARQL=\ """ BASE . SELECT ?s WHERE { ?s ?p ?o }""" def test_bad_sparql(): from rdflib.Graph import Graph Graph().query(BAD_SPARQL) test_bad_sparql.unstable = True if __name__ == '__main__': test_bad_sparql() rdflib-2.4.2/test/test_datatype_parsing.py0000644000175000017500000000171711153616026017660 0ustar nachonacho# -*- coding: UTF-8 -*- import unittest from pprint import pprint from rdflib import ConjunctiveGraph, URIRef, Literal, RDFS, Namespace from rdflib.Literal import _XSD_NS from StringIO import StringIO from sets import Set testContent = """ @prefix : . @prefix xsd: . :xi2 :p "1"^^xsd:integer . :xd3 :p "1"^^xsd:double . """ exNS = Namespace("http://example.org/things#") double1 = Literal('1',datatype=_XSD_NS.double) class TestSparqlOPT_FILTER(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_OPT_FILTER(self): xd3Objs = [o for o in self.graph.objects(subject=exNS.xd3,predicate=exNS.p)] self.failUnless(xd3Objs[0].datatype == _XSD_NS.double, "Expecting %r, got instead : %r"%(double1,xd3Objs[0])) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/test_sparql_equals.pyc0000644000175000017500000000340711164177226017345 0ustar nachonachoÑò ¯Ic@sjddklZlZddklZddkZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿ(tConjunctiveGraphtURIRef(tStringIONtTestSparqlEqualscBs'eZhdd6Zd„Zd„ZRS(s%http://www.w3.org/2000/01/rdf-schema#trdfscCs=d|i}tƒ|_}|iit|ƒddƒdS(Nsü @prefix rdfs: <%(rdfs)s> . rdfs:label "Document 1"@en . rdfs:label "Document 2"@en . 
rdfs:label "Document 3"@en . tformattn3(tPREFIXESRtgraphtloadR(tselft testContentR((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_equals.pytsetUp s cCsStdƒ}d|d|i}|ii|ƒ}|g}|i|i|ƒdS(Nshttp://example.org/doc/1s PREFIX rdfs: <%(rdfs)s> SELECT ?uri WHERE { ?uri rdfs:label ?label . FILTER( ?uri = ) } (RRRtqueryt assertEqualtselected(R turiR trestexpected((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_equals.pyttest_uri_equalss   (t__name__t __module__RR R(((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_equals.pyRs  t__main__( trdflibRRRtunittesttTestCaseRRtmain(((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_equals.pyts   rdflib-2.4.2/test/parser.py0000644000175000017500000000211311153616026014546 0ustar nachonachoimport unittest from rdflib.Graph import Graph from rdflib import URIRef, BNode, Literal, RDF, RDFS from rdflib.StringInputSource import StringInputSource class ParserTestCase(unittest.TestCase): backend = 'default' path = 'store' def setUp(self): self.graph = Graph(store=self.backend) self.graph.open(self.path) def tearDown(self): self.graph.close() def testNoPathWithHash(self): g = self.graph g.parse(StringInputSource("""\ testing """), publicID="http://example.org") subject = URIRef("http://example.org#") label = g.value(subject, RDFS.label) self.assertEquals(label, Literal("testing")) type = g.value(subject, RDF.type) self.assertEquals(type, RDFS.Class) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/parser_rdfcore.pyc0000644000175000017500000001646211164176136016436 0ustar nachonachoÑò ¯Ic@scddkZddklZlZlZlZlZddklZddk l Z ddk l Z ddk lZddkZeidƒZdZdd klZddkZeeiƒZd „Zd e fd „ƒYZed ƒZddkZd„Zd„Zd„Zdeifd„ƒYZ edƒZ!edƒZ"e ƒZ#edƒZ$e#i%e$e"dedƒfƒe#i%e$ei&edƒfƒe#i%e$ei'edƒfƒe(djoœe ƒZ)e)i*dƒddkZddk+Z+y)e+i+ei,dddgƒ\Z-Z.Wn)e+i/j oZ0ee0ƒe1ƒnXzei,Z,xÓei,dD]ÄZ2dZee2ƒZ3ede3ƒe3ei4ed 
fe)jo+ee3e)ƒZ5ed!d"d#ge5ƒq`e3ei4ed$fe)jo+ee3e)ƒZ5ed%d"d#ge5ƒq`ed&e3ƒq`We6e,ƒdjoei7ƒnWde#i8d'ƒXndS((iÿÿÿÿN(tURIReftBNodetLiteraltRDFtRDFS(t Namespace(t ParserError(tGraph(tfirsttparser_rdfcorei(t StreamWritercCsti|dƒdS(Ns (t_loggertinfo(tmsg((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pytwritest TestStorecBseZd„Zd„ZRS(cCs tt|ƒiƒ||_dS(N(tsuperRt__init__texpected(tselfR((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyRscCs¯|\}}}t|tƒ olt|tƒ o[|||f|ijo>d|iƒ|iƒ|iƒf}tot|ƒqˆqŒntt|ƒi|||fƒdS(Nu)Triple not in expected result: %s, %s, %s( t isinstanceRRtn3tverboseRRRtadd(Rt.1tstptotm((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyRs "%(t__name__t __module__RR(((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyRs s7http://www.w3.org/2000/10/rdf-tests/rdfcore/testSchema#cCsd|S(Ns,http://www.w3.org/2000/10/rdf-tests/rdfcore/((trel((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pytresolve*sc Cstotd|ƒnd}t|i|tdƒƒ}t|i|tdƒƒ}tƒ}|ddjo d}nd}|i|d |ƒt|ƒ}|ddjo d}nd}y|i|d |ƒWngtj o[}td ƒt|ƒtd ƒ|‚ytt |ƒƒWntd ƒnXd }nÓX|i |ƒpÁtd|ƒtožtdƒx@|D]8\} } } tdt | ƒt | ƒt | ƒfƒqxWtdƒxD|D]8\} } } tdt | ƒt | ƒt | ƒfƒqÅWn|d 7}n|S(Nu TESTING: %sit inputDocumenttoutputDocumentiýÿÿÿs.nttnttxmltformatsFailed 's ' failed withssorry could not dump out error.iu Failed: '%s's In: s %s %s %s.s Out: ( RRRtobjectstTESTRtloadRRttypet isomorphictrepr( turitmanifesttresulttinDoctoutDocRR%tstoretpeRRR((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyt _testPositive-sN         - 1c Cs#totd|ƒnd}t|i|tdƒƒ}tƒ}tƒ}ti|t d|fƒti|t dt fƒy8|ddjo d}nd }|i |d |ƒWn2t j o&}ti|t it d fƒn3Xtd |ƒti|t it d fƒd}|S(Nu TESTING: %siR!ttesttsystemiýÿÿÿs.ntR#R$R%t PassingRunu Failed: '%s't FailingRuni(RRRR&R'RRtresultsRtRESULTR5R(RRR)(R,R-R.R/R1R4R%R2((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyt _testNegativeWs&   "tParserTestCasecBs>eZdZdZeZd„Zd„Zd„Zd„Z 
RS(tdefaultR1cCs:td|iƒ|_}|i|iƒ|idƒdS(NR1s8http://www.w3.org/2000/10/rdf-tests/rdfcore/Manifest.rdf(RR1R-topentpathR((RR-((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pytsetUpuscCs|iiƒdS(N(R-tclose(R((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyttearDownzscCsÄ|i}d}}t|ititdƒƒ}|iƒxd|D]\}t|i|tdƒƒ}|t dƒjo't ||ƒ}|d7}||7}qCqCW|i |dd||fƒdS(NitNegativeParserTesttstatustAPPROVEDisFailed: %s of %s.( R-tlisttsubjectsRR)R'tsortRR&RR:t assertEquals(RR-t num_failedttotaltnegstnegRCR.((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyt testNegative}s    c CsF|i}t|ititdƒƒ}|iƒd}}xæ|D]Þ}t|i|tdƒƒ}|t dƒjo©t ||ƒ}t ƒ}t i |td|fƒt i |tdtfƒ|p!t i |titdfƒnt i |titdfƒ|d 7}||7}qCqCW|i|dd ||fƒdS( NtPositiveParserTestiRCRDR4R5R6R7isFailed: %s of %s.(R-RERFRR)R'RGRR&RR3RR8RR9R5RH( RR-turisRIRJR,RCR.R4((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyt testPositiveŠs$    ! ( RRR1R>tTruetslowtestR?RARMRP(((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pyR;ps   s.http://www.w3.org/2002/03owlt/resultsOntology#shttp://xmlns.com/foaf/0.1/R5thomepageshttp://rdflib.net/tRDFLibtt__main__s8http://www.w3.org/2000/10/rdf-tests/rdfcore/Manifest.rdfish:thelpu Testing: %sRNuPositive test %stPASSEDtFAILEDRBuNegative test %su %s not ??s results.rdf(9tunittesttrdflibRRRRRtrdflib.NamespaceRtrdflib.exceptionsRt rdflib.GraphRt rdflib.utilRtloggingt getLoggerR Rtencodings.utf_8R tsyststdouttswRRR'tosR R3R:tTestCaseR;R9tFOAFR8R5RtlabeltcommentRR-R(tgetopttargvtoptlisttargst GetoptErrorR tusagetargtcaseR)R.tlentmaint serialize(((s>/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser_rdfcore.pytsh (       * .        
)    rdflib-2.4.2/test/parser.pyc0000644000175000017500000000371011164176136014722 0ustar nachonachoÑò ¯Ic@sŒddkZddklZddklZlZlZlZlZddk l Z dei fd„ƒYZ e djoeiƒndS(iÿÿÿÿN(tGraph(tURIReftBNodetLiteraltRDFtRDFS(tStringInputSourcetParserTestCasecBs/eZdZdZd„Zd„Zd„ZRS(tdefaulttstorecCs,td|iƒ|_|ii|iƒdS(NR (Rtbackendtgraphtopentpath(tself((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser.pytsetUp scCs|iiƒdS(N(R tclose(R((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser.pyttearDownscCs…|i}|itdƒddƒtdƒ}|i|tiƒ}|i|tdƒƒ|i|t i ƒ}|i|ti ƒdS(Ns testing tpublicIDshttp://example.orgshttp://example.org#ttesting( R tparseRRtvalueRtlabelt assertEqualsRRttypetClass(RtgtsubjectRR((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser.pyttestNoPathWithHashs   (t__name__t __module__R R RRR(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser.pyRs   t__main__(tunittestt rdflib.GraphRtrdflibRRRRRtrdflib.StringInputSourceRtTestCaseRRtmain(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/parser.pyts (" rdflib-2.4.2/test/test_sparql_json_results.pyc0000644000175000017500000000750111164177226020604 0ustar nachonachoÑò ¯Ic@s÷ddklZddklZddkZdZdZhZeddfed . @prefix rdf: . a foaf:Person; foaf:name "Alice"; foaf:knows . a foaf:Person; foaf:name "Bob" . s+ PREFIX foaf: sy SELECT ?name ?x ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } } s¢"name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"}, "x" : {"type": "uri", "value" : "http://example.org/bob"} }toptionalsq SELECT ?name ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } }s@"vars" : [ "name", "friend" ]t select_varss1 SELECT * WHERE { ?x foaf:name ?name . } twildcards;"vars" : [ "name", "x" ]t wildcard_varss£ SELECT DISTINCT ?name WHERE { { foaf:name ?name . } UNION { foaf:name ?name . 
} } så{ "name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"} }, { "name" : {"type": "literal", "xml:lang" : "None", "value" : "Alice"} }tunions SELECT DISTINCT ?name WHERE { { foaf:name ?name . } UNION { foaf:name ?name . } UNION { foaf:name ?name . } } s"Alice"tunion3cs&‡fd†}dˆiƒ|_|S(Ncs$tˆ\}}|i||ƒdS(N(t test_materialt_query_result_contains(tselftquerytcorrect(ttestname(sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pyttestYsstest%s(ttitlet__name__(R R((R sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pyt make_methodXstTestSparqlJsonResultscBsbeZd„Zd„ZedƒZedƒZedƒZedƒZedƒZ edƒZ RS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtparseRt test_data(R ((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pytsetUpbs cCsQ|ii|ƒ}|iddƒ}|i|i|ƒdjd||fƒdS(NRtjsonisExpected: %s - to contain: %s(RR t serializet failUnlesstfind(R R R tresultst result_json((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pyR fsRRRRRR( Rt __module__RR Rt testOptionalt testWildcardt testUniont testUnion3ttestSelectVarsttestWildcardVars(((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pyR`s       t__main__( trdflibRRtunittestRtPROLOGUERRtTestCaseRRtmain(((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_json_results.pyts4        rdflib-2.4.2/test/n3.py0000644000175000017500000000520311153616026013575 0ustar nachonachofrom rdflib import * input = """ # Definitions of terms describing the n3 model # @keywords a. @prefix n3: <#>. @prefix log: . @prefix rdf: . @prefix rdfs: . @prefix : <#> . @forAll :s, :p, :x, :y, :z. n3:Statement a rdf:Class . n3:StatementSet a rdf:Class . n3:includes a rdfs:Property . # Cf rdf:li n3:predicate a rdf:Property; rdfs:domain n3:statement . n3:subject a rdf:Property; rdfs:domain n3:statement . n3:object a rdf:Property; rdfs:domain n3:statement . n3:context a rdf:Property; rdfs:domain n3:statement; rdfs:range n3:StatementSet . 
########### Rules { :x :p :y . } log:means { [ n3:subject :x; n3:predicate :p; n3:object :y ] a log:Truth}. # Needs more thought ... ideally, we have the implcit AND rules of # juxtaposition (introduction and elimination) { { { :x n3:includes :s. } log:implies { :y n3:includes :s. } . } forall :s1 . } log:implies { :x log:implies :y } . { { { :x n3:includes :s. } log:implies { :y n3:includes :s. } . } forall :s1 } log:implies { :x log:implies :y } . # I think n3:includes has to be axiomatic builtin. - unless you go to syntax description. # syntax.n3? """ import unittest from rdflib.Graph import Graph, ConjunctiveGraph class N3TestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testFileName(self): """ Test that the n3 parser throws an Exception when using the identifier ":foo.txt", as this is not valid as per the rdf spec. """ input = """ @prefix : . :foo.txt :p :q . """ g = Graph() self.assertRaises(Exception, g.parse, StringInputSource(input), format="n3") # This isn't the expected result based on my reading of n3 bits #s = g.value(predicate=URIRef("http://www.example.com/p"), object=URIRef("http://www.example.com/q")) #self.assertEquals(s, URIRef("http://www.example.org/foo.txt")) def testModel(self): g = ConjunctiveGraph() g.parse(StringInputSource(input), format="n3") i = 0 for s, p, o in g: if isinstance(s, Graph): i += 1 self.assertEquals(i, 3) self.assertEquals(len(list(g.contexts())), 13) g.close() def testParse(self): g = ConjunctiveGraph() g.parse("http://groups.csail.mit.edu/dig/2005/09/rein/examples/troop42-policy.n3", format="n3") if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/nt.py0000644000175000017500000000060711153616026013701 0ustar nachonachoimport unittest from rdflib import * from rdflib.Graph import Graph class NTTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testModel(self): g = Graph() 
g.load("http://www.w3.org/2000/10/rdf-tests/rdfcore/rdfms-empty-property-elements/test002.nt", format="nt") if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/test_sparql_literal_patterns.py0000644000175000017500000000410611153616026021253 0ustar nachonacho# -*- coding: UTF-8 -*- #======================================================================= from rdflib import ConjunctiveGraph, URIRef, Literal from StringIO import StringIO from datetime import date #======================================================================= testRdf = """ @prefix rdfs: . @prefix xsd: . @prefix : . :plain "plain"; :integer 1; :float 1.1; :string "string"^^xsd:string; :date "2007-04-28"^^xsd:date; rdfs:label "Thing"@en, "Sak"@sv . """ graph = ConjunctiveGraph() graph.load(StringIO(testRdf), format='n3') PROLOGUE = """ PREFIX rdfs: PREFIX xsd: PREFIX t: """ thing = URIRef("http://example.org/thing") SPARQL = PROLOGUE+" SELECT ?uri WHERE { ?uri %s . } " TEST_DATA = [ ('plain', SPARQL % 't:plain "plain"', [thing]), ('integer', SPARQL % 't:integer 1', [thing]), ('float', SPARQL % 't:float 1.1', [thing]), ('langlabel_en', SPARQL % 'rdfs:label "Thing"@en', [thing]), ('langlabel_sv', SPARQL % 'rdfs:label "Sak"@sv', [thing]), ('string', SPARQL % 't:string "string"^^xsd:string', [thing]), ('date', SPARQL % 't:date "2007-04-28"^^xsd:date', [thing]), ] def assert_equal(name, sparql, real, expected): assert real == expected, 'Failed test "%s":\n%s\n, expected\n\t%s\nand got\n\t%s\n'\ % (name, sparql, expected, real) def test_generator(): for name, sparql, expected in TEST_DATA: res = graph.query(sparql) #print res.serialize('json') yield assert_equal, name, sparql, res.selected, expected #======================================================================= if __name__ == '__main__': from sys import argv name, sparql, expected = TEST_DATA[int(argv[1])] res = graph.query(sparql) assert_equal(name, sparql, res.selected, expected) 
rdflib-2.4.2/test/graph.py0000644000175000017500000001376311153616026014370 0ustar nachonachoimport unittest from tempfile import mkdtemp from rdflib import URIRef, BNode, Literal, RDF from rdflib.Graph import Graph class GraphTestCase(unittest.TestCase): store_name = 'default' path = None slowtest = True def setUp(self): self.graph = Graph(store=self.store_name) a_tmp_dir = mkdtemp() self.path = self.path or a_tmp_dir self.graph.open(self.path) self.michel = URIRef(u'michel') self.tarek = URIRef(u'tarek') self.bob = URIRef(u'bob') self.likes = URIRef(u'likes') self.hates = URIRef(u'hates') self.pizza = URIRef(u'pizza') self.cheese = URIRef(u'cheese') def tearDown(self): self.graph.close() def addStuff(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese self.graph.add((tarek, likes, pizza)) self.graph.add((tarek, likes, cheese)) self.graph.add((michel, likes, pizza)) self.graph.add((michel, likes, cheese)) self.graph.add((bob, likes, cheese)) self.graph.add((bob, hates, pizza)) self.graph.add((bob, hates, michel)) # gasp! def removeStuff(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese self.graph.remove((tarek, likes, pizza)) self.graph.remove((tarek, likes, cheese)) self.graph.remove((michel, likes, pizza)) self.graph.remove((michel, likes, cheese)) self.graph.remove((bob, likes, cheese)) self.graph.remove((bob, hates, pizza)) self.graph.remove((bob, hates, michel)) # gasp! 
def testAdd(self): self.addStuff() def testRemove(self): self.addStuff() self.removeStuff() def testTriples(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese asserte = self.assertEquals triples = self.graph.triples Any = None self.addStuff() # unbound subjects asserte(len(list(triples((Any, likes, pizza)))), 2) asserte(len(list(triples((Any, hates, pizza)))), 1) asserte(len(list(triples((Any, likes, cheese)))), 3) asserte(len(list(triples((Any, hates, cheese)))), 0) # unbound objects asserte(len(list(triples((michel, likes, Any)))), 2) asserte(len(list(triples((tarek, likes, Any)))), 2) asserte(len(list(triples((bob, hates, Any)))), 2) asserte(len(list(triples((bob, likes, Any)))), 1) # unbound predicates asserte(len(list(triples((michel, Any, cheese)))), 1) asserte(len(list(triples((tarek, Any, cheese)))), 1) asserte(len(list(triples((bob, Any, pizza)))), 1) asserte(len(list(triples((bob, Any, michel)))), 1) # unbound subject, objects asserte(len(list(triples((Any, hates, Any)))), 2) asserte(len(list(triples((Any, likes, Any)))), 5) # unbound predicates, objects asserte(len(list(triples((michel, Any, Any)))), 2) asserte(len(list(triples((bob, Any, Any)))), 3) asserte(len(list(triples((tarek, Any, Any)))), 2) # unbound subjects, predicates asserte(len(list(triples((Any, Any, pizza)))), 3) asserte(len(list(triples((Any, Any, cheese)))), 3) asserte(len(list(triples((Any, Any, michel)))), 1) # all unbound asserte(len(list(triples((Any, Any, Any)))), 7) self.removeStuff() asserte(len(list(triples((Any, Any, Any)))), 0) def testStatementNode(self): graph = self.graph from rdflib.Statement import Statement c = URIRef("http://example.org/foo#c") r = URIRef("http://example.org/foo#r") s = Statement((self.michel, self.likes, self.pizza), c) graph.add((s, RDF.value, r)) self.assertEquals(r, graph.value(s, RDF.value)) self.assertEquals(s, graph.value(predicate=RDF.value, object=r)) def 
testGraphValue(self): from rdflib.Graph import GraphValue graph = self.graph alice = URIRef("alice") bob = URIRef("bob") pizza = URIRef("pizza") cheese = URIRef("cheese") g1 = Graph() g1.add((alice, RDF.value, pizza)) g1.add((bob, RDF.value, cheese)) g1.add((bob, RDF.value, pizza)) g2 = Graph() g2.add((bob, RDF.value, pizza)) g2.add((bob, RDF.value, cheese)) g2.add((alice, RDF.value, pizza)) gv1 = GraphValue(store=graph.store, graph=g1) gv2 = GraphValue(store=graph.store, graph=g2) graph.add((gv1, RDF.value, gv2)) v = graph.value(gv1) #print type(v) self.assertEquals(gv2, v) #print list(gv2) #print gv2.identifier graph.remove((gv1, RDF.value, gv2)) def testConnected(self): graph = self.graph self.addStuff() self.assertEquals(True, graph.connected()) jeroen = URIRef("jeroen") unconnected = URIRef("unconnected") graph.add((jeroen,self.likes,unconnected)) self.assertEquals(False, graph.connected()) #class MemoryGraphTestCase(GraphTestCase): # store_name = "Memory" # slowtest = False try: import persistent # If we can import persistent then test ZODB store class ZODBGraphTestCase(GraphTestCase): store_name = "ZODB" slowtest = False except ImportError, e: print "Can not test ZODB store: %s" % e try: import RDF as Redland # don't shadow RDF ns imported above # If we can import RDF then test Redland store class RedLandTestCase(GraphTestCase): store_name = "Redland" slowtest = False except ImportError, e: print "Can not test Redland store: %s" % e if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/test_sparql_base_ref.pyc0000644000175000017500000000310111164177226017610 0ustar nachonachoÑò ¯Ic@svddklZlZddklZddkZdZdZdeifd„ƒYZe djoei ƒndS( iÿÿÿÿ(tConjunctiveGraphtLiteral(tStringIONsã @prefix foaf: . @prefix rdf: . 
a foaf:Person; foaf:name "Alice"; foaf:knows .sU BASE SELECT ?name WHERE { [ a :Person ; :name ?name ] }tTestSparqlJsonResultscBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtparseRt test_data(tself((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_base_ref.pytsetUps cCsC|iitƒidƒ}|i|dtdƒjd|ƒdS(NtpythonitAlicesExpected: 'Alice' Got: %s(Rtqueryt test_queryt serializet failUnlessR(R trt((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_base_ref.pyt test_base_refs(t__name__t __module__R R(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_base_ref.pyRs t__main__( trdflibRRRtunittestRRtTestCaseRRtmain(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_base_ref.pyts  rdflib-2.4.2/test/advanced_sparql_constructs.pyc0000644000175000017500000001010111164176136021034 0ustar nachonachoÑò ¯Ic @s ddkZddklZddklZlZlZlZddkl Z ddk l Z ddk l Z lZlZddkZddklZdZd Zd Zd Zd Zd eifd„ƒYZedjo,eieƒZeiddƒieƒndS(iÿÿÿÿN(t Namespace(tplugintRDFtRDFStURIRef(tStore(tStringIO(tGraphtReadOnlyGraphAggregatetConjunctiveGraph(tpprints @prefix rdf: . @prefix rdfs: . @prefix : . :foo :relatedTo [ a rdfs:Class ]; :parentOf ( [ a rdfs:Class ] ). :bar :relatedTo [ a rdfs:Resource ]; :parentOf ( [ a rdfs:Resource ] ). ( [ a rdfs:Resource ] ) :childOf :bar. ( [ a rdfs:Class ] ) :childOf :foo. 
s¿ BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ?node :relatedTo [ a rdfs:Class ] }s BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ?node :parentOf ( [ a rdfs:Class ] ) }sÄ BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ( [ a rdfs:Resource ] ) :childOf ?node }sˆ PREFIX owl: SELECT DISTINCT ?class FROM WHERE { ?thing a ?class }t AdvancedTestscBs5eZd„Zd„Zd„Zd„Zd„ZRS(cCsDtidtƒƒ}t|ƒ|_|iittƒddƒdS(NtIOMemorytformattn3(RtgetRRt testGraphtparseRt testGraph1N3(tselftmemStore((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pytsetUp:sc Cs{ddkl}tdƒ}|iitƒ}|i||idƒƒ||i|i |i |i t i ti gƒƒdS(Niÿÿÿÿ(tSetshttp://www.w3.org/2002/07/owl#tpython(tsetsRRRtquerytsparqlQ4t assertEqualst serializetOntologyPropertytClasstOntologytAnnotationPropertyRtPropertyR(RRtOWL_NStrt((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyttestNamedGraph?s cCs9|iitƒ}|i|idƒdtdƒƒdS(NRishttp://test/foo(RRtsparqlQ1RRR(RR#((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyttestScopedBNodesEscCs9|iitƒ}|i|idƒdtdƒƒdS(NRishttp://test/bar(RRtsparqlQ3RRR(RR#((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyt%testCollectionContentWithinAndWithoutIscCsO|iitƒ}|i|idƒdtdƒƒ|idt|ƒƒdS(NRishttp://test/fooi(RRtsparqlQ2RRRtlen(RR#((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyttestCollectionAsObjectMs#(t__name__t __module__RR$R&R(R+(((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyR 9s     t__main__t verbosityi(tunittesttrdflib.NamespaceRtrdflibRRRRt rdflib.storeRt cStringIORt rdflib.GraphRRR tsysR RR%R)R'RtTestCaseR R,t makeSuitetsuitetTextTestRunnertrun(((sJ/Users/eikeon/rdflib-svn/branches/2.4.x/test/advanced_sparql_constructs.pyts "     rdflib-2.4.2/test/aggregate_graphs.py0000644000175000017500000001153611153616026016555 0ustar nachonachoimport sys import unittest from rdflib.Namespace import Namespace from rdflib import plugin,RDF,RDFS,URIRef 
from rdflib.store import Store from cStringIO import StringIO from rdflib.Graph import Graph,ReadOnlyGraphAggregate,ConjunctiveGraph from pprint import pprint testGraph1N3=""" @prefix rdf: . @prefix rdfs: . @prefix : . :foo a rdfs:Class. :bar :d :c. :a :d :c. """ testGraph2N3=""" @prefix rdf: . @prefix rdfs: . @prefix : . @prefix log: . :foo a rdfs:Resource. :bar rdfs:isDefinedBy [ a log:Formula ]. :a :d :e. """ testGraph3N3=""" @prefix rdf: . @prefix rdfs: . @prefix log: . @prefix : . <> a log:N3Document. """ sparqlQ = \ """ PREFIX rdfs: SELECT * FROM NAMED FROM NAMED FROM NAMED FROM WHERE {?sub ?pred rdfs:Class }""" sparqlQ2 =\ """ PREFIX rdfs: SELECT ?class WHERE { GRAPH ?graph { ?member a ?class } }""" sparqlQ3 =\ """ PREFIX rdfs: PREFIX log: SELECT ?n3Doc WHERE {?n3Doc a log:N3Document }""" class GraphAggregates1(unittest.TestCase): def setUp(self): memStore = plugin.get('IOMemory',Store)() self.graph1 = Graph(memStore) self.graph2 = Graph(memStore) self.graph3 = Graph(memStore) for n3Str,graph in [(testGraph1N3,self.graph1), (testGraph2N3,self.graph2), (testGraph3N3,self.graph3)]: graph.parse(StringIO(n3Str),format='n3') self.G = ReadOnlyGraphAggregate([self.graph1,self.graph2,self.graph3]) def testAggregateRaw(self): #Test triples assert len(list(self.G.triples((None,RDF.type,None)))) == 4 assert len(list(self.G.triples((URIRef("http://test/bar"),None,None)))) == 2 assert len(list(self.G.triples((None,URIRef("http://test/d"),None)))) == 3 #Test __len__ assert len(self.G) == 8 #assert context iteration for g in self.G.contexts(): assert isinstance(g,Graph) #Test __contains__ assert (URIRef("http://test/foo"),RDF.type,RDFS.Resource) in self.G barPredicates = [URIRef("http://test/d"),RDFS.isDefinedBy] assert len(list(self.G.triples_choices((URIRef("http://test/bar"),barPredicates,None)))) == 2 class GraphAggregates2(unittest.TestCase): def setUp(self): memStore = plugin.get('IOMemory',Store)() self.graph1 = Graph(memStore,URIRef("http://example.com/graph1")) 
self.graph2 = Graph(memStore,URIRef("http://example.com/graph2")) self.graph3 = Graph(memStore,URIRef("http://example.com/graph3")) for n3Str,graph in [(testGraph1N3,self.graph1), (testGraph2N3,self.graph2), (testGraph3N3,self.graph3)]: graph.parse(StringIO(n3Str),format='n3') self.graph4 = Graph(memStore,RDFS.RDFSNS) self.graph4.parse(RDFS.RDFSNS) self.G = ConjunctiveGraph(memStore) def testAggregateSPARQL(self): print sparqlQ rt = self.G.query(sparqlQ) assert len(rt) > 1 #print rt.serialize(format='xml') LOG_NS = Namespace(u'http://www.w3.org/2000/10/swap/log#') rt=self.G.query(sparqlQ2,initBindings={u'?graph' : URIRef("http://example.com/graph3")}) #print rt.serialize(format='json') assert rt.serialize('python')[0] == LOG_NS.N3Document,repr(list(rt.serialize('python'))) class GraphAggregates3(unittest.TestCase): def setUp(self): memStore = plugin.get('IOMemory',Store)() self.graph1 = Graph(memStore,URIRef("graph1")) self.graph2 = Graph(memStore,URIRef("graph2")) self.graph3 = Graph(memStore,URIRef("graph3")) for n3Str,graph in [(testGraph1N3,self.graph1), (testGraph2N3,self.graph2), (testGraph3N3,self.graph3)]: graph.parse(StringIO(n3Str),format='n3') self.G = ConjunctiveGraph(memStore) def testDefaultGraph(self): #test that CG includes triples from all 3 assert self.G.query(sparqlQ3),"CG as default graph should *all* triples" assert not self.graph2.query(sparqlQ3),"Graph as default graph should *not* include triples from other graphs" if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/test_sparql_equals.py0000644000175000017500000000214511153616026017172 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef from StringIO import StringIO import unittest class TestSparqlEquals(unittest.TestCase): PREFIXES = { 'rdfs': "http://www.w3.org/2000/01/rdf-schema#" } def setUp(self): testContent = """ @prefix rdfs: <%(rdfs)s> . rdfs:label "Document 1"@en . rdfs:label "Document 2"@en . rdfs:label "Document 3"@en . 
""" % self.PREFIXES self.graph = graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_uri_equals(self): uri = URIRef("http://example.org/doc/1") query = (""" PREFIX rdfs: <%(rdfs)s> SELECT ?uri WHERE { ?uri rdfs:label ?label . FILTER( ?uri = <"""+uri+"""> ) } """) % self.PREFIXES res = self.graph.query(query) expected = [uri] self.assertEqual(res.selected,expected) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/test_sparql_base_ref.py0000644000175000017500000000151411153616026017445 0ustar nachonachofrom rdflib import ConjunctiveGraph, Literal from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . a foaf:Person; foaf:name "Alice"; foaf:knows .""" test_query = """ BASE SELECT ?name WHERE { [ a :Person ; :name ?name ] }""" class TestSparqlJsonResults(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.parse(StringIO(test_data), format="n3") def test_base_ref(self): rt=self.graph.query(test_query).serialize("python") self.failUnless(rt[0] == Literal("Alice"),"Expected:\n 'Alice' \nGot:\n %s" % rt) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/test_sparql_naf.py0000644000175000017500000000216111153616026016442 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef, Literal, RDFS from StringIO import StringIO import unittest testContent = """ @prefix rdfs: . rdfs:label "Document 1","Document 2". rdfs:label "Document 1".""" doc1 = URIRef("http://example.org/doc/1") doc2 = URIRef("http://example.org/doc/2") QUERY = u""" SELECT ?X WHERE { ?X ?label "Document 1". OPTIONAL { ?X ?label ?otherLabel. 
FILTER ( ?otherLabel != "Document 1" ) } FILTER (!bound(?otherLabel)) }""" class TestSparqlOPT_FILTER(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_OPT_FILTER(self): results = self.graph.query(QUERY, DEBUG=False, initBindings={'?label':RDFS.label}).serialize(format='python') self.failUnless(list(results) == [doc2], "expecting : %s"%repr([doc2])) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/test_datatype_encoding_mysql.py0000644000175000017500000000240511153616026021223 0ustar nachonachofrom rdflib import Literal, ConjunctiveGraph, Namespace, BNode, URIRef, Literal, plugin from rdflib.store import Store, VALID_STORE, CORRUPTED_STORE, NO_STORE, UNKNOWN from rdflib.Graph import Graph, ConjunctiveGraph from rdflib.store.FOPLRelationalModel.QuadSlot import * def test_dType_encoding(): correct=normalizeValue('http://www.w3.org/2001/XMLSchema#integer', 'U') wrong=normalizeValue('http://www.w3.org/2001/XMLSchema#integer', 'L') store = plugin.get('MySQL',Store)() store.open('user=..,password=..,db=test,host=..',create=False) Graph(store).add((BNode(),URIRef('foo'),Literal(1))) db=store._db cursor=db.cursor() cursor.execute( "select * from %s where data_type = '%s'"% (store.literalProperties, wrong)) assert not cursor.fetchone(),"Datatype encoding bug!" 
for suffix,(relations_only,tables) in store.viewCreationDict.items(): query='create view %s%s as %s'%(store._internedId, suffix, ' union all '.join([t.viewUnionSelectExpression(relations_only) for t in tables])) print "## Creating View ##\n",query store.rollback() store.close() if __name__ == '__main__': test_dType_encoding() rdflib-2.4.2/test/type_check.pyc0000644000175000017500000000364311164176137015552 0ustar nachonachoÑò ¯Ic @s…ddkZddklZddklZddklZddklZddklZedƒZ dei fd „ƒYZ dS( iÿÿÿÿN(tGraph(tSubjectTypeError(tPredicateTypeError(tObjectTypeError(tURIReftfoot TypeCheckCasecBsGeZeZdZdZd„Zd„Zd„Zd„Z d„Z RS(tdefaulttstorecCs,td|iƒ|_|ii|iƒdS(Ntbackend(RR Rtopentpath(tself((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pytsetUpscCs|iiƒdS(N(Rtclose(R ((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyttearDownscCs&|it|iidttfƒdS(N(t assertRaisesRRtaddtNoneR(R ((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyttestSubjectTypeChecks cCs&|it|iitdtfƒdS(N(RRRRRR(R ((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyttestPredicateTypeChecks cCs&|it|iittdfƒdS(N(RRRRRR(R ((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyttestObjectTypeCheck s ( t__name__t __module__tTruetunstableR R R RRRR(((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyR s    ( tunittestt rdflib.GraphRtrdflib.exceptionsRRRt rdflib.URIRefRRtTestCaseR(((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/type_check.pyts  rdflib-2.4.2/test/store_performace.pyc0000644000175000017500000001236011164176137016767 0ustar nachonachoÑò ¯Ic@sddkZddklZddklZddkZddkZddklZddklZddk l Z d„Z dei fd „ƒYZ d e fd „ƒYZy*dd klZd e fd„ƒYZWnej oZdGeGHnXy&ddkZde fd„ƒYZWnej oZdGeGHnXy&ddkZde fd„ƒYZWnej oZdGeGHnXy>ddkZddkZddkZde fd„ƒYZWnej oZdGeGHnXedjoeiƒndS(iÿÿÿÿN(tGraph(tURIRef(ttime(trandom(tmkdtempcCstdtƒƒS(Ns%s(RR(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyt random_uri st StoreTestCasecBsAeZdZdZd„Zd„Zd„Zd„Zd„ZRS(sœ Test case for testing store 
performance... probably should be something other than a unit test... but for now we'll add it as a unit test. tdefaultcCsÆtiƒ|_tiƒtiƒtd|iƒ|_|idjo:ddkl }ddk l }|}|ƒi |ƒnt ƒ}}|ii|dtƒtƒ|_}|idƒdS(NtstoretMySQLiÿÿÿÿ(t configString(R tcreateshttp://eikeon.com(tgct isenabledtgcoldtcollecttdisableRRtgrapht test.mysqlR trdflib.store.MySQLR tdestroyRtopentTruetinputtparse(tselfR R tpatht a_tmp_dirR((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pytsetUps   cCs/|iiƒ|iotiƒn|`dS(N(RtcloseRR tenable(R((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyttearDown(s  cCsmd}|iGHdGx$tid|ƒD]}|iƒq%WdGx$tid|ƒD]}|iƒqPWdGHdS(Nisinput:srandom:t.(Rt itertoolstrepeattNonet _testInputt _testRandom(Rtnumberti((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyttestTime/scsst|iƒ}|i‰‡fd†}tid|ƒ}tƒ}x|D] }|ƒqIWtƒ}d||GdS(Ncs5tƒ}tƒ}tƒ}ˆi|||fƒdS(N(Rtadd(tstpto(R(s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyt add_random>s   s%.3g(tlenRRR!R"R#R(RR&R-tittt0t_itt1((Rs@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR%:s    csmd}ˆi‰‡‡fd†}tid|ƒ}tƒ}x|D] }|ƒqCWtƒ}d||GdS(Nics%xˆiD]}ˆi|ƒq WdS(N(RR)(tt(RR(s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pytadd_from_inputOs s%.3g(RR!R"R#R(RR&R4R/R0R1R2((RRs@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR$Ks    ( t__name__t __module__t__doc__RRRR(R%R$(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyRs   tMemoryStoreTestCasecBseZdZRS(tMemory(R5R6R(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR8[s(t SleepycattSleepycatStoreTestCasecBseZdZRS(R:(R5R6R(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR;`ssCan not test Sleepycat store:tZODBStoreTestCasecBseZeZdZRS(tZODB(R5R6Rtnon_standard_depR(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR<hssCan not test ZODB 
store:tRedLandTestCasecBseZeZdZRS(tRedland(R5R6RR>R(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyR?rssCan not test Redland store:t MySQLTestCasecBseZeZdZRS(R (R5R6RR>R(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyRAssCan not test MySQL store:t__main__(tunittestt rdflib.GraphRtrdflibRR R!RRttempfileRRtTestCaseRR8trdflib.store.SleepycatR:R;t ImportErrortet persistentR<tRDFR?tMySQLdbtshatsysRAR5tmain(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/store_performace.pyts@    K  $ rdflib-2.4.2/test/rules.pyc0000644000175000017500000000572711164176136014572 0ustar nachonachoÑò ¯Ic @s¿ddkZddklZddkTddklZedƒZyXddkl Z ddk l Z d„Z d „Z d „Zd eifd „ƒYZWnej oZd eGHnXdS(iÿÿÿÿN(tmkdtemp(t*(tGraphs#http://www.w3.org/2000/10/swap/log#(tterms(t InterpretercCs€t|tƒoti|ƒSt|tƒoti|ƒSt|tƒo|St|tƒo|Stdt|ƒƒ‚dS(NsUnexpected Type: %s( t isinstancetVariableRtBNodetExivartURIReftLiteralt Exceptionttype(tnode((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pyt_convertsccsDx=|D]5\}}}tit|ƒt|ƒt|ƒƒVqWdS(N(RtPatternR(tgtstpto((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pytpatternssccszxs|D]k\}}}|tijoLt|tƒ o;t|tƒ o*tit|ƒt|ƒt|ƒƒVqqWdS(N(tLOGtimpliesRRRtFactR(RRRR((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pytfacts s2tPychinkoTestCasecBs)eZdZd„Zd„Zd„ZRS(tdefaultcCsEtd|iƒ|_|iidtƒƒ|iidddƒdS(Ntstoret configurations test/a.n3tformattn3(RtbackendRtopenRtparse(tself((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pytsetUp'scCs|iiƒdS(N(Rtclose(R"((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pyttearDown,sc Csßg}xx|iidtidfƒD]X\}}}tt|ƒƒ}tt|ƒƒ}|iti |||||fƒƒq%Wt |ƒ}t ƒ}|i dƒ|} |i} |i tt| ƒƒdtƒ|iƒdS(Nshttp://eikeon.com/t initialSet(RttriplestNoneRRtlistRtappendRtRuleRRR!taddFactstsetRtTruetrun( R"trulesRRRtlhstrhstinterptftsource((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pyt testPychinko/s)    (t__name__t __module__RR#R%R6(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pyR%s  sCould not test Pychinko: 
%s(tunittestttempfileRtrdflibt rdflib.GraphRt NamespaceRtpychinkoRtpychinko.interpreterRRRRtTestCaseRt ImportErrorte(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/rules.pyts      rdflib-2.4.2/test/seq.pyc0000644000175000017500000000454211164176136014222 0ustar nachonachoÑò ¯Ic@sƒddkZddkTddklZddklZdeifd„ƒYZd„Ze djoei d d ƒnd Z dS( iÿÿÿÿN(t*(tGraph(tStringInputSourcet SeqTestCasecBs/eZdZdZd„Zd„Zd„ZRS(tdefaulttstorecCs@td|iƒ}|_|i|iƒ|ittƒƒdS(NR(RtbackendRtopentpathtparseRts(tselfR((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pytsetUp scCs|iiƒdS(N(Rtclose(R ((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pyttearDownscCs|iitdƒƒ}|it|ƒdƒ|i|diƒtdƒƒ|i|diƒtdƒƒ|iiƒdS(Nshttp://example.org/Seqiiÿÿÿÿshttp://example.org/sixishttp://example.org/three(RtseqtURIReft assertEqualstlentconcretet serialize(R titems((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pyttestSeqs   (t__name__t __module__RRR RR(((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pyRs   cCs titƒS(N(tunittestt makeSuiteR(((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pyt test_suitest__main__t defaultTestRsE ( Rtrdflibt rdflib.GraphRtrdflib.StringInputSourceRtTestCaseRRRtmainR (((s3/Users/eikeon/rdflib-svn/branches/2.4.x/test/seq.pyts    rdflib-2.4.2/test/test_sparql_date_filter.pyc0000644000175000017500000000621611164177226020336 0ustar nachonachoÑò ¯Ic @sÊddklZlZlZlZddklZddklZddklZddk l Z ddk Z dZ dZ d Z d Zed ƒZd e ifd „ƒYZedjoe iƒndS(iÿÿÿÿ(tConjunctiveGraphtURIReftLiteraltRDFS(tRenderSPARQLAlgebra(tParse(R(tStringIONsØ @prefix foaf: . @prefix dc: . @prefix xsd: . a foaf:Document; dc:date "2006-10-01T12:35:00"^^xsd:dateTime. a foaf:Document; dc:date "2005-05-25T08:15:00"^^xsd:dateTime. a foaf:Document; dc:date "1990-01-01T12:45:00"^^xsd:dateTime.uF PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. 
FILTER (?date < xsd:dateTime("2006-01-01T00:00:00") && ?date > xsd:dateTime("1995-06-15T00:00:00")) }u* PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. FILTER (?date < "2006-01-01T00:00:00" && ?date > "1995-06-15T00:00:00") }uG PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. FILTER (?date < "2006-01-01T00:00:00"^^xsd:dateTime && ?date > "1995-06-15T00:00:00"^^xsd:dateTime ) }s%http://del.icio.us/rss/chimezie/papertDateFilterTestcBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtloadRt testContent(tself((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_date_filter.pytsetUp;s cCs˜x‘tttgD]€}|GHt|ƒ}|ii|dtƒiddƒ}t|ƒ}|i t |ƒo |t gjdt gt |ƒfƒqWdS(NtDEBUGRtpythonsexpecting : %s . Got: %s( tQUERY1tQUERY2tQUERY3RR tquerytFalset serializetlistt failUnlesstlentANSWER1trepr(R RtpQuerytresults((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_date_filter.pyttest_DATE_FILTER1>s  (t__name__t __module__RR(((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_date_filter.pyR:s t__main__(trdflibRRRRtrdflib.sparql.AlgebraRtrdflib.sparql.bisonRRtunittestR RRRRtTestCaseRRtmain(((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_date_filter.pyts"      rdflib-2.4.2/test/test_issue_45.py0000644000175000017500000000313011153616026015751 0ustar nachonachoimport unittest from rdflib.Graph import ConjunctiveGraph as Graph from rdflib.Namespace import Namespace as NS from rdflib.sparql import Algebra from StringIO import StringIO class TestSparqlASK(unittest.TestCase): def setUp(self): self.graph = Graph() io = StringIO(""" @prefix rdfs: . @prefix : . :Foo a rdfs:Class . :bar a :Foo . 
""") self.graph.load(io, format='n3') self.compliance_setting, Algebra.DAWG_DATASET_COMPLIANCE = Algebra.DAWG_DATASET_COMPLIANCE, False def tearDown(self): Algebra.DAWG_DATASET_COMPLIANCE = self.compliance_setting def test_ask_true(self): """ Ask for a triple that exists, assert that the response is True. """ res = self.graph.query('ASK { a } ') self.assertEquals(res.askAnswer, [True], "The answer should have been that the triple was found") def test_ask_false(self): """ Ask for a triple that does not exist, assert that the response is False. """ res = self.graph.query('ASK { a } ') self.assertEquals(res.askAnswer, [False], "The answer should have been that the triple was not found") class TestSparqlASKWithCompliance(TestSparqlASK): def setUp(self): TestSparqlASK.setUp(self) Algebra.DAWG_DATASET_COMPLIANCE = True if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/test_sparql_told_bnodes.pyc0000644000175000017500000000436611164177226020354 0ustar nachonachoÑò ¯Ic @sÆddklZddklZlZlZlZlZlZl Z l Z ddk l Z l Z lZddkZddkZddklZdeifd„ƒYZedjoeiƒndS( iÿÿÿÿ(t Namespace(tplugintRDFtRDFStURIReftStringInputSourcetLiteraltBNodetVariable(tGraphtReadOnlyGraphAggregatetConjunctiveGraphN(tpprinttTestSPARQLToldBNodescBseZd„Zd„ZRS(cCsBd}tƒ|_|iitdtitifƒddƒdS(Nuhttp://example.org/s§ @prefix : . @prefix rdf: <%s> . @prefix rdfs: <%s> . [ :prop :val ]. 
[ a rdfs:Class ].tformattn3(R tgraphtparseRRtRDFNSRtRDFSNS(tselftNS((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_told_bnodes.pytsetUps  cCsæx/|iidtidfƒD]\}}}qWd|iƒ}|GH|ii|ƒ}|it|ƒdjdt|ƒƒh|t dƒ6}d}|ii|d|ƒ}|it|ƒdjdt|ƒ|iƒfƒdS(Ns#SELECT ?obj WHERE { %s ?prop ?obj }isDBGP should only match the 'told' BNode by name (result set size: %s)s?subjs&SELECT ?obj WHERE { ?subj ?prop ?obj }t initBindingssOBGP should only match the 'told' BNode by name (result set size: %s, BNode: %s)( RttriplestNoneRttypeRtqueryt failUnlesstlenR(RtstptoRtrttbindings((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_told_bnodes.pyt testToldBNodes&(t__name__t __module__RR#(((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_told_bnodes.pyR s t__main__(trdflib.NamespaceRtrdflibRRRRRRRRt rdflib.GraphR R R tunittesttsysR tTestCaseR R$tmain(((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_told_bnodes.pyts: rdflib-2.4.2/test/test_sparql_filter_bound.pyc0000644000175000017500000000261711164177226020531 0ustar nachonachoÑò ¯Ic@sàddklZlZlZlZlZedƒZedƒZeƒZeƒZ ei e ededƒfƒeƒZ ei e ededƒfƒei e ededƒfƒd „Z e d jo e ƒnd S( iÿÿÿÿ(tLiteraltConjunctiveGrapht NamespacetBNodetURIRefu http://purl.org/dc/elements/1.1/uhttp://xmlns.com/foaf/0.1/t givenNametAlicetBobtdates2005-04-04T04:04:04ZcCsXtidƒidƒ}tdddddƒg}||jptd||f‚dS(Ns+PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?name WHERE { ?x foaf:givenName ?name . OPTIONAL { ?x dc:date ?date } . 
FILTER ( bound(?date) ) }tpythonRtlangtdatatypesExpected %s but got %s(tgraphtqueryt serializeRtNonetAssertionError(trestexpected((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_filter_bound.pyt test_bound st__main__N(trdflibRRRRRtDCtFOAFR tstaddtbRt__name__(((sH/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_filter_bound.pyts(         rdflib-2.4.2/test/test_sparql_naf2.py0000644000175000017500000000230411153616026016523 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef, Literal, RDFS from StringIO import StringIO import unittest testContent = """ @prefix foaf: . foaf:name "Chime"; a foaf:Person. foaf:knows ,. foaf:name "Ivan".""" doc1 = URIRef("http://eikeon.com/") QUERY = u""" PREFIX foaf: SELECT ?X WHERE { ?P a foaf:Person . ?X foaf:knows ?P . OPTIONAL { ?X foaf:knows ?OP . ?OP foaf:name "Judas" } FILTER (!bound(?OP)) }""" class TestSparqlOPT_FILTER2(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_OPT_FILTER(self): results = self.graph.query(QUERY, DEBUG=False).serialize(format='python') results = list(results) self.failUnless( results == [doc1], "expecting : %s . Got: %s"%([doc1],repr(results))) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/test_sparql_naf.pyc0000644000175000017500000000350511164177226016616 0ustar nachonachoÑò ¯Ic@sšddklZlZlZlZddklZddkZdZedƒZedƒZ dZ dei fd „ƒYZ e d joeiƒndS( iÿÿÿÿ(tConjunctiveGraphtURIReftLiteraltRDFS(tStringIONs¹ @prefix rdfs: . rdfs:label "Document 1","Document 2". rdfs:label "Document 1".shttp://example.org/doc/1shttp://example.org/doc/2u£ SELECT ?X WHERE { ?X ?label "Document 1". OPTIONAL { ?X ?label ?otherLabel. 
FILTER ( ?otherLabel != "Document 1" ) } FILTER (!bound(?otherLabel)) }tTestSparqlOPT_FILTERcBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtloadRt testContent(tself((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf.pytsetUps cCsd|iitdtdhtid6ƒiddƒ}|it|ƒt gjdt t gƒƒdS(NtDEBUGt initBindingss?labelRtpythonsexpecting : %s( RtquerytQUERYtFalseRtlabelt serializet failUnlesstlisttdoc2trepr(R tresults((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf.pyttest_OPT_FILTERs (t__name__t __module__R R(((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf.pyRs t__main__(trdflibRRRRRtunittestR tdoc1RRtTestCaseRRtmain(((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf.pyts"    rdflib-2.4.2/test/n3_quoting.pyc0000644000175000017500000000265211164176136015520 0ustar nachonachoÑò ¯Ic@s—ddkZddklZlZlZddklZdddddd d d d d dg Zdeifd„ƒYZ e djoei ƒndS(iÿÿÿÿN(tLiteralt NamespacetStringInputSource(tGraphs no quotesssingle ' quotesdouble " quotet"t's"'"s\s\"s\\"s\"\s)heret N3QuotingcBseZd„ZRS(cCsãtƒ}tdƒ}xBttƒD]4\}}|i|d|d|t|ƒfƒq"W|iddƒ}tƒ}|it|ƒddƒxNttƒD]@\}}|i |d|d|ƒ}|i |t|ƒƒq›WdS(Nshttp://quoting.test/tsubjscase%stformattn3( RRt enumeratetcasestaddRt serializetparseRtvaluet assertEqual(tselftgtNStitcasetn3txttg2tl((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3_quoting.pyttests   ,  (t__name__t __module__R(((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3_quoting.pyRst__main__( tunittesttrdflibRRRt rdflib.GraphRR tTestCaseRRtmain(((s:/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3_quoting.pyts   rdflib-2.4.2/test/broken_parse_test/0000755000175000017500000000000011204354476016421 5ustar nachonachordflib-2.4.2/test/broken_parse_test/n3-writer-test-02.n30000644000175000017500000000053011153616022021676 0ustar nachonacho@prefix a: . @prefix : <#> . :test :comment "String tests". a:b a:p01 "123". a:b a:p02 "123-\"-\'-quote". a:b a:p03 '123-\"-\'-quote'. 
a:b a:p10 """Triple double-quoted string with single quotes (" ') and newline in it""". a:b a:p11 '''Triple single-quoted string with single quotes (" ') and newline in it'''. rdflib-2.4.2/test/broken_parse_test/n3-writer-test-25.n30000644000175000017500000000051611153616022021707 0ustar nachonacho# Datatypes and language tags @prefix rdf: . @prefix x: . [ x:p1 "bar"@en-us^^rdf:XMLLiteral ; # Two below are same statement x:p2 "bar"^^rdf:XMLLiteral@en-us ; x:p2 "bar"@en-us^^rdf:XMLLiteral ; x:p3 "text"@en ; ] . rdflib-2.4.2/test/broken_parse_test/n3-writer-test-27.n30000644000175000017500000000125711153616022021714 0ustar nachonacho# Numbers @prefix xsd: . @prefix x: . @prefix : <#> . # Each of these should be one statement. # Also tested in reader test rdf-test-25.n3 :x :d1 -1.0 ; :d1 "-1.0"^^xsd:double . :x :d2 +1.0 ; :d2 "+1.0"^^xsd:double . :x :d3 1.0 ; :d3 "1.0"^^xsd:double . :x :e1 1.0e5 ; :e1 "1.0e5"^^xsd:double . :x :e2 1e6 ; :e2 "1e6"^^xsd:double . :x :e3 1e+6 ; :e3 "1e+6"^^xsd:double . :x :e4 1e-6 ; :e4 "1e-6"^^xsd:double . :x :i1 1 ; :i1 "1"^^xsd:integer . :x :i2 +1 ; :i2 "+1"^^xsd:integer . :x :i3 -1 ; :i3 "-1"^^xsd:integer . :y :p "123"^^xsd:integer . :y :p "123"^^xsd:double . rdflib-2.4.2/test/broken_parse_test/n3-writer-test-29.n30000644000175000017500000000113611153616022021712 0ustar nachonacho# Test qname-ization @prefix : . @prefix ns: . @prefix ns2: . @prefix ex: . # Ensure we don't write ns:p1/p2 (illegal URI) :x "1" . # Legal URI :x "1" . # Numeric namespace prefix: gives a warning on reading # as Jena models work on XML rules. #@prefix 1: . :x "1" . # Numberic localname is allowed. :x ex:1 "2" . # As is _1 :x ex:_1 "rdf:_1 test" . rdflib-2.4.2/test/broken_parse_test/rdf-test-01.n30000644000175000017500000000055411153616022020624 0ustar nachonacho# Test basic ways of writing statements @prefix x: . @prefix : <#> . # Statement x:a1 :b "string" . x: :b "string" . # Compound statements x:a2 :b "v1" ; :b "v2" . x:a2 :b1 "v3" , "v4" . 
x:a2 :b2 "a" , "b" ; :b3 "a", "b". # Null property lists x:a3 :p1 "z1" ; :p2 "z2" ; . # Null object list x:a3 :p "z1" , "z2" , . rdflib-2.4.2/test/broken_parse_test/rdf-test-08.n30000644000175000017500000000201011153616022020620 0ustar nachonacho# WARNING! This file must be checked in CVS as binary to avoid # newline conversion. # Literals : these are all the same statement ## cwm does now allow '' quoted literals '0'. "1" . '2'. "12" , '12'. ## cwm does not allow ''' quoted strings '''123''', """123""". # Literals with a " in them 'Literal with " double quote - 1' . "Literal with \" double quote - 2" . """Long "DoubleQuoted" String""". """Long 'SingleQuoted' String""". # Newlines """Unix newline convention""". """Windows newline convention""". """Mac newline convention""". # UTF-8 chacacter # This fails under Java 1.3 but passes on 1.4.1 # The string should contain octal 200. # "" , 'euro'. rdflib-2.4.2/test/broken_parse_test/rdf-test-10.n30000644000175000017500000000037011153616022020620 0ustar nachonacho# Test named anon nodes :p [ ]. ; :p [ ]. :p [] . rdflib-2.4.2/test/broken_parse_test/rdf-test-24.n30000644000175000017500000000111311153616022020621 0ustar nachonacho# Language tags and datatypes @prefix : <#> . @prefix rdf: . @prefix xsd: . # In each of these, there is exactly one statement # This parser allows lag tags and datatypes in any order # One statement :a :q0 "11"@en^^xsd:integer ; :q0 "11"^^xsd:integer@en . # One statement :a :q1 "11"@fr^^xsd:integer ; :q1 "11"@not-fr^^xsd:integer ; :q1 "11"^^xsd:integer ; . # Two statements :a :q2 "bar"@en^^rdf:XMLLiteral , "bar"@en . rdflib-2.4.2/test/broken_parse_test/rdf-test-25.n30000644000175000017500000000062111153616022020625 0ustar nachonacho@prefix : <#> . @prefix rdf: . @prefix xsd: . :x :f1 -1.0 . :x :f2 +1.0 . :x :f3 1.0 . :x :f4 0.1 . :x :e1 1.0e5 . :x :e2 1e6 . :x :e3 1e+6 . :x :e3a 1e+6. :x :e4 1e-6 . :x :E1 1.0E5 . :x :E2 1E6 . :x :E3 1E+6 . :x :E3a 1E+6. :x :E4 1E-6 . :x :i1 1 . :x :i1a 1. 
:x :i2 +1 . :x :i3 -1 . rdflib-2.4.2/test/broken_parse_test/rdf-test-27.n30000644000175000017500000000033711153616022020633 0ustar nachonacho@prefix : <#> . @prefix x: . # \u in URIs not supported # @prefix e: . # e:alpha "alpha" . x:α x:p "alpha qname" . rdflib-2.4.2/test/test_sparql_date_filter.py0000644000175000017500000000472711153616026020172 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef, Literal, RDFS from rdflib.sparql.Algebra import RenderSPARQLAlgebra from rdflib.sparql.bison import Parse from rdflib import URIRef from StringIO import StringIO import unittest testContent = """ @prefix foaf: . @prefix dc: . @prefix xsd: . a foaf:Document; dc:date "2006-10-01T12:35:00"^^xsd:dateTime. a foaf:Document; dc:date "2005-05-25T08:15:00"^^xsd:dateTime. a foaf:Document; dc:date "1990-01-01T12:45:00"^^xsd:dateTime.""" QUERY1 = u""" PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. FILTER (?date < xsd:dateTime("2006-01-01T00:00:00") && ?date > xsd:dateTime("1995-06-15T00:00:00")) }""" QUERY2 = u""" PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. FILTER (?date < "2006-01-01T00:00:00" && ?date > "1995-06-15T00:00:00") }""" QUERY3 = u""" PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?doc WHERE { ?doc a foaf:Document; dc:date ?date. FILTER (?date < "2006-01-01T00:00:00"^^xsd:dateTime && ?date > "1995-06-15T00:00:00"^^xsd:dateTime ) }""" ANSWER1 = URIRef('http://del.icio.us/rss/chimezie/paper') class DateFilterTest(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_DATE_FILTER1(self): for query in [QUERY1,QUERY2,QUERY3]: print query pQuery = Parse(query) #print RenderSPARQLAlgebra(pQuery) results = self.graph.query(pQuery, DEBUG=False).serialize(format='python') results = list(results) self.failUnless( len(results) and results == [ANSWER1], "expecting : %s . 
Got: %s"%([ANSWER1],repr(results))) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/test_sparql_told_bnodes.py0000644000175000017500000000250211153616026020171 0ustar nachonachofrom rdflib.Namespace import Namespace from rdflib import plugin,RDF,RDFS,URIRef, StringInputSource, Literal, BNode, Variable from rdflib.Graph import Graph,ReadOnlyGraphAggregate,ConjunctiveGraph import unittest,sys from pprint import pprint class TestSPARQLToldBNodes(unittest.TestCase): def setUp(self): NS = u"http://example.org/" self.graph = ConjunctiveGraph() self.graph.parse(StringInputSource(""" @prefix : . @prefix rdf: <%s> . @prefix rdfs: <%s> . [ :prop :val ]. [ a rdfs:Class ]."""%(RDF.RDFNS,RDFS.RDFSNS)), format="n3") def testToldBNode(self): for s,p,o in self.graph.triples((None,RDF.type,None)): pass query = """SELECT ?obj WHERE { %s ?prop ?obj }"""%s.n3() print query rt = self.graph.query(query) self.failUnless(len(rt) == 1,"BGP should only match the 'told' BNode by name (result set size: %s)"%len(rt)) bindings = {Variable('?subj'):s} query = """SELECT ?obj WHERE { ?subj ?prop ?obj }""" rt = self.graph.query(query,initBindings=bindings) self.failUnless(len(rt) == 1,"BGP should only match the 'told' BNode by name (result set size: %s, BNode: %s)"%(len(rt),s.n3())) if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/test_datetime.py0000644000175000017500000000061011153616026016105 0ustar nachonachoimport unittest from rdflib import URIRef, Literal class TestRelativeBase(unittest.TestCase): def setUp(self): self.x = Literal("2008-12-01T18:02:00Z", datatype=URIRef('http://www.w3.org/2001/XMLSchema#dateTime')) def test_equality(self): self.assertEquals(self.x == self.x, True) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/ntriples.py0000644000175000017500000001437211153616026015124 0ustar nachonacho#!/usr/bin/env python """ N-Triples Parser License: GPL 2; share and enjoy! Author: Sean B. 
Palmer, inamidst.com Documentation: http://inamidst.com/proj/rdf/ntriples-doc Command line usage: ./ntriples.py - parses URI as N-Triples ./ntriples.py --help - prints out this help message # @@ fully empty document? """ import re uriref = r'<([^:]+:[^\s"<>]+)>' literal = r'"([^"\\]*(?:\\.[^"\\]*)*)"' litinfo = r'(?:@([a-z]+(?:-[a-z0-9]+)*)|\^\^' + uriref + r')?' r_line = re.compile(r'([^\r\n]*)(?:\r\n|\r|\n)') r_wspace = re.compile(r'[ \t]*') r_wspaces = re.compile(r'[ \t]+') r_tail = re.compile(r'[ \t]*\.[ \t]*') r_uriref = re.compile(uriref) r_nodeid = re.compile(r'_:([A-Za-z][A-Za-z0-9]*)') r_literal = re.compile(literal + litinfo) bufsiz = 2048 validate = False class Node(unicode): pass class URI(Node): pass class bNode(Node): pass class Literal(Node): def __new__(cls, lit, lang=None, dtype=None): n = str(lang) + ' ' + str(dtype) + ' ' + lit return unicode.__new__(cls, n) class Sink(object): def __init__(self): self.length = 0 def triple(self, s, p, o): self.length += 1 class ParseError(Exception): pass quot = {'t': '\t', 'n': '\n', 'r': '\r', '"': '"', '\\': '\\'} r_safe = re.compile(r'([\x20\x21\x23-\x5B\x5D-\x7E]+)') r_quot = re.compile(r'\\(t|n|r|"|\\)') r_uniquot = re.compile(r'\\u([0-9A-F]{4})|\\U([0-9A-F]{8})') def unquote(s): """Unquote an N-Triples string.""" result = [] while s: m = r_safe.match(s) if m: s = s[m.end():] result.append(m.group(1)) continue m = r_quot.match(s) if m: s = s[2:] result.append(quot[m.group(1)]) continue m = r_uniquot.match(s) if m: s = s[m.end():] u, U = m.groups() codepoint = int(u or U, 16) if codepoint > 0x10FFFF: raise ParseError("Disallowed codepoint: %08X" % codepoint) result.append(unichr(codepoint)) elif s.startswith('\\'): raise ParseError("Illegal escape at: %s..." 
% s[:10]) else: raise ParseError("Illegal literal character: %r" % s[0]) return unicode(''.join(result)) if not validate: def unquote(s): return s.decode('unicode-escape') r_hibyte = re.compile(r'([\x80-\xFF])') def uriquote(uri): return r_hibyte.sub(lambda m: '%%%02X' % ord(m.group(1)), uri) if not validate: def uriquote(uri): return uri class NTriplesParser(object): """An N-Triples Parser. Usage: p = NTriplesParser(sink=MySink()) sink = p.parse(f) # file; use parsestring for a string """ def __init__(self, sink=None): if sink is not None: self.sink = sink else: self.sink = Sink() def parse(self, f): """Parse f as an N-Triples file.""" if not hasattr(f, 'read'): raise ParseError("Item to parse must be a file-like object.") self.file = f self.buffer = '' while True: self.line = self.readline() if self.line is None: break try: self.parseline() except ParseError: raise ParseError("Invalid line: %r" % self.line) return self.sink def parsestring(self, s): """Parse s as an N-Triples string.""" if not isinstance(s, basestring): raise ParseError("Item to parse must be a string instance.") from cStringIO import StringIO f = StringIO() f.write(s) f.seek(0) self.parse(f) def readline(self): """Read an N-Triples line from buffered input.""" # N-Triples lines end in either CRLF, CR, or LF # Therefore, we can't just use f.readline() if not self.buffer: buffer = self.file.read(bufsiz) if not buffer: return None self.buffer = buffer while True: m = r_line.match(self.buffer) if m: # the more likely prospect self.buffer = self.buffer[m.end():] return m.group(1) else: buffer = self.file.read(bufsiz) if not buffer: raise ParseError("EOF in line") self.buffer += buffer def parseline(self): self.eat(r_wspace) if (not self.line) or self.line.startswith('#'): return # The line is empty or a comment subject = self.subject() self.eat(r_wspaces) predicate = self.predicate() self.eat(r_wspaces) object = self.object() self.eat(r_tail) if self.line: raise ParseError("Trailing garbage") 
self.sink.triple(subject, predicate, object) def peek(self, token): return self.line.startswith(token) def eat(self, pattern): m = pattern.match(self.line) if not m: # @@ Why can't we get the original pattern? raise ParseError("Failed to eat %s" % pattern) self.line = self.line[m.end():] return m def subject(self): # @@ Consider using dictionary cases subj = self.uriref() or self.nodeid() if not subj: raise ParseError("Subject must be uriref or nodeID") return subj def predicate(self): pred = self.uriref() if not pred: raise ParseError("Predicate must be uriref") return pred def object(self): objt = self.uriref() or self.nodeid() or self.literal() if not objt: raise ParseError("Unrecognised object type") return objt def uriref(self): if self.peek('<'): uri = self.eat(r_uriref).group(1) uri = unquote(uri) uri = uriquote(uri) return URI(uri) return False def nodeid(self): if self.peek('_'): return bNode(self.eat(r_nodeid).group(1)) return False def literal(self): if self.peek('"'): lit, lang, dtype = self.eat(r_literal).groups() if lang and dtype: raise ParseError("Can't have both a language and a datatype") lit = unquote(lit) return Literal(lit, lang, dtype) return False def parseURI(uri): import urllib parser = NTriplesParser() u = urllib.urlopen(uri) sink = parser.parse(u) u.close() # for triple in sink: # print triple print 'Length of input:', sink.length def main(): import sys if len(sys.argv) == 2: parseURI(sys.argv[1]) else: print __doc__ if __name__=="__main__": main() rdflib-2.4.2/test/test_sparql_graph_graph_pattern.py0000644000175000017500000000227711164772153021733 0ustar nachonachofrom rdflib.Graph import ConjunctiveGraph from rdflib import URIRef, Literal from StringIO import StringIO import unittest testContent = """ @prefix foaf: . foaf:name "Chime"; a foaf:Person. foaf:knows ,. foaf:name "Ivan".""" doc1 = URIRef("http://eikeon.com/") QUERY = u""" PREFIX foaf: SELECT ?X WHERE { ?P a foaf:Person . ?X foaf:knows ?P . OPTIONAL { ?X foaf:knows ?OP . 
?OP foaf:name "Judas" } FILTER (!bound(?OP)) }""" class TestSparqlOPT_FILTER2(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_OPT_FILTER(self): results = self.graph.query(QUERY, DEBUG=False).serialize(format='python') results = list(results) self.failUnless( results == [doc1], "expecting : %s . Got: %s"%([doc1],repr(results))) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/rdf_lists.py0000755000175000017500000000340111153616026015247 0ustar nachonacho#!/usr/bin/python import os, sys, string import unittest from cStringIO import StringIO from rdflib.Graph import ConjunctiveGraph, Graph from rdflib import StringInputSource from rdflib import URIRef, Literal, BNode from rdflib.Collection import Collection DATA=\ """ . _:fIYNVPxd4. . . . _:fIYNVPxd3. . _:fIYNVPxd4 . _:fIYNVPxd4 . _:fIYNVPxd4 . _:fIYNVPxd3 . _:fIYNVPxd3 . """ def main(): unittest.main() class OWLCollectionTest(unittest.TestCase): def testCollectionRDFXML(self): g=Graph().parse(StringIO(DATA),format='nt') g.namespace_manager.bind('owl',URIRef('http://www.w3.org/2002/07/owl#')) print g.serialize(format='pretty-xml') if __name__ == '__main__': main() rdflib-2.4.2/test/postgres.py0000644000175000017500000000041311165212723015120 0ustar nachonachofrom n3_2 import testN3Store,testN3,implies from rdflib.Graph import QuotedGraph from rdflib import * configString="user=test,password=,host=localhost,db=test" if __name__=='__main__': testN3Store('PostgreSQL',configString) #testRegex() #profileTests() rdflib-2.4.2/test/test_datatype_encoding_mysql.pyc0000644000175000017500000000355311164177225021400 0ustar nachonachoÑò ¯Ic@s¡ddklZlZlZlZlZlZlZddklZl Z l Z l Z l Z ddk lZlZddkTd„Zedjo eƒndS(iÿÿÿÿ(tLiteraltConjunctiveGrapht NamespacetBNodetURIRefRtplugin(tStoret VALID_STOREtCORRUPTED_STOREtNO_STOREtUNKNOWN(tGraphR(t*c CsBtddƒ}tddƒ}tidtƒƒ}|iddtƒt|ƒitƒt dƒt dƒfƒ|i }|i ƒ}|i d |i|fƒ|iƒ p td ‚xo|iiƒD]^\}\}}d |i|d ig}|D]} || 
i|ƒq÷~ƒf} d G| GHqÈW|iƒ|iƒdS(Ns(http://www.w3.org/2001/XMLSchema#integertUtLtMySQLs#user=..,password=..,db=test,host=..tcreatetfoois'select * from %s where data_type = '%s'sDatatype encoding bug!screate view %s%s as %ss union all s## Creating View ## (tnormalizeValueRtgetRtopentFalseR taddRRRt_dbtcursortexecutetliteralPropertiestfetchonetAssertionErrortviewCreationDicttitemst _internedIdtjointviewUnionSelectExpressiontrollbacktclose( tcorrecttwrongtstoretdbRtsuffixtrelations_onlyttablest_[1]tttquery((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_encoding_mysql.pyttest_dType_encodings*+     *  t__main__N(trdflibRRRRRRt rdflib.storeRRRR R t rdflib.GraphR t)rdflib.store.FOPLRelationalModel.QuadSlotR.t__name__(((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datatype_encoding_mysql.pyts 4(   rdflib-2.4.2/test/rules.py0000644000175000017500000000366511153616026014421 0ustar nachonachoimport unittest from tempfile import mkdtemp from rdflib import * from rdflib.Graph import Graph LOG = Namespace("http://www.w3.org/2000/10/swap/log#") try: from pychinko import terms from pychinko.interpreter import Interpreter def _convert(node): if isinstance(node, Variable): return terms.Variable(node) #return node elif isinstance(node, BNode): return terms.Exivar(node) elif isinstance(node, URIRef): #return terms.URI(node) return node elif isinstance(node, Literal): return node else: raise Exception("Unexpected Type: %s" % type(node)) def patterns(g): for s, p, o in g: yield terms.Pattern(_convert(s), _convert(p), _convert(o)) def facts(g): for s, p, o in g: if p!=LOG.implies and not isinstance(s, BNode) and not isinstance(o, BNode): yield terms.Fact(_convert(s), _convert(p), _convert(o)) class PychinkoTestCase(unittest.TestCase): backend = 'default' def setUp(self): self.g = Graph(store=self.backend) self.g.open(configuration=mkdtemp()) self.g.parse("test/a.n3", format="n3") def tearDown(self): self.g.close() def testPychinko(self): rules = [] for s, p, o in 
self.g.triples((None, LOG.implies, None)): lhs = list(patterns(s)) rhs = list(patterns(o)) rules.append(terms.Rule(lhs, rhs, (s, p, o))) interp = Interpreter(rules) f = Graph() f.parse("http://eikeon.com/") source = f source = self.g interp.addFacts(set(facts(source)), initialSet=True) interp.run() #_logger.debug("inferred facts: %s" % interp.inferredFacts) except ImportError, e: print "Could not test Pychinko: %s" % e rdflib-2.4.2/test/IdentifierEquality.pyc0000644000175000017500000000625311164176136017233 0ustar nachonachoÑò ¯Ic@sddkZddklZlZlZddklZddklZddkl Z dei fd„ƒYZ e djoei ƒndS( iÿÿÿÿN(tURIReftBNodetLiteral(tCORE_SYNTAX_TERMS(tGraph(tRDFtIdentifierEqualitycBsbeZd„Zd„Zd„Zd„Zd„Zd„Zd„Zd„Z d„Z d „Z RS( cCs@tdƒ|_tƒ|_tdƒ|_d|_d|_dS(Nshttp://example.org/uhttp://example.org/ufoo(RturirefRtbnodeRtliteraltpython_literaltpython_literal_2(tself((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pytsetUps   cCs |i|i|ijtƒdS(N(t assertEqualsRR tFalse(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestA&scCs |i|i|ijtƒdS(N(RR RR(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestB)scCs |i|i|ijtƒdS(N(RRR R(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestC,scCs |i|i|ijtƒdS(N(RR RR(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestD/scCs |i|i|ijtƒdS(N(RR R tTrue(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestE2scCs |i|i|ijtƒdS(N(RR R R(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestF5scCs|idtjtƒdS(Ntfoo(RRR(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestG8scCs |itdƒtjtƒdS(Ns.http://www.w3.org/1999/02/22-rdf-syntax-ns#RDF(RRRR(R ((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestH;scCsatƒ}|i|iti|ifƒ|i|iti|ifƒ|it|ƒdƒdS(Ni(RtaddRRtvalueR t assertEqualtlen(R tg((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyttestI>s ( 
t__name__t __module__R RRRRRRRRR(((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyRs         t__main__(tunittesttrdflibRRRt#rdflib.syntax.parsers.RDFXMLHandlerRt rdflib.GraphRRtTestCaseRR tmain(((sB/Users/eikeon/rdflib-svn/branches/2.4.x/test/IdentifierEquality.pyts ( rdflib-2.4.2/test/n3_quoting.py0000644000175000017500000000174011153616026015345 0ustar nachonachoimport unittest from rdflib import Literal, Namespace, StringInputSource from rdflib.Graph import Graph cases = ['no quotes', "single ' quote", 'double " quote', '"', "'", '"\'"', '\\', # len 1 '\\"', # len 2 '\\\\"', # len 3 '\\"\\', # len 3 'here', ] class N3Quoting(unittest.TestCase): def test(self): g = Graph() NS = Namespace("http://quoting.test/") for i, case in enumerate(cases): g.add((NS['subj'], NS['case%s' % i], Literal(case))) n3txt = g.serialize(format="n3") #print n3txt g2 = Graph() g2.parse(StringInputSource(n3txt), format="n3") for i, case in enumerate(cases): l = g2.value(NS['subj'], NS['case%s' % i]) #print repr(l), repr(case) self.assertEqual(l, Literal(case)) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/sparql_limit.pyc0000644000175000017500000000276511164176137016140 0ustar nachonachoÑò ¯Ic@s†ddklZlZddklZddklZddkZdZdZdei fd„ƒYZ e d joei ƒndS( iÿÿÿÿ(tConjunctiveGraphtplugin(tStore(tStringIONsP @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . s` PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . 
} LIMIT 2 t TestLimitcBseZd„ZRS(cCs`ttidtƒƒƒ}|ittƒddƒ|itƒ}|i t |ƒdjƒdS(NtIOMemorytformattn3i( RRtgetRtparseRt test_datatqueryt test_queryt failUnlesstlen(tselftgraphtresults((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_limit.pyt testLimits(t__name__t __module__R(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_limit.pyRst__main__( trdflibRRt rdflib.storeRRtunittestR R tTestCaseRRtmain(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_limit.pyts  rdflib-2.4.2/test/context.py0000644000175000017500000003123111153616026014741 0ustar nachonachoimport unittest from tempfile import mkdtemp from rdflib import * from rdflib.Graph import Graph class ContextTestCase(unittest.TestCase): #store = 'Memory' store = 'default' slowtest = True def setUp(self): self.graph = ConjunctiveGraph(store=self.store) if self.store == "MySQL": from mysql import configString from rdflib.store.MySQL import MySQL path=configString MySQL().destroy(path) else: path = a_tmp_dir = mkdtemp() self.graph.open(path, create=True) self.michel = URIRef(u'michel') self.tarek = URIRef(u'tarek') self.bob = URIRef(u'bob') self.likes = URIRef(u'likes') self.hates = URIRef(u'hates') self.pizza = URIRef(u'pizza') self.cheese = URIRef(u'cheese') self.c1 = URIRef(u'context-1') self.c2 = URIRef(u'context-2') # delete the graph for each test! 
self.graph.remove((None, None, None)) def tearDown(self): self.graph.close() def get_context(self, identifier): assert isinstance(identifier, URIRef) or \ isinstance(identifier, BNode), type(identifier) return Graph(store=self.graph.store, identifier=identifier, namespace_manager=self) def addStuff(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese c1 = self.c1 graph = Graph(self.graph.store, c1) graph.add((tarek, likes, pizza)) graph.add((tarek, likes, cheese)) graph.add((michel, likes, pizza)) graph.add((michel, likes, cheese)) graph.add((bob, likes, cheese)) graph.add((bob, hates, pizza)) graph.add((bob, hates, michel)) # gasp! def removeStuff(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese c1 = self.c1 graph = Graph(self.graph.store, c1) graph.remove((tarek, likes, pizza)) graph.remove((tarek, likes, cheese)) graph.remove((michel, likes, pizza)) graph.remove((michel, likes, cheese)) graph.remove((bob, likes, cheese)) graph.remove((bob, hates, pizza)) graph.remove((bob, hates, michel)) # gasp! def addStuffInMultipleContexts(self): c1 = self.c1 c2 = self.c2 triple = (self.pizza, self.hates, self.tarek) # revenge! 
# add to default context self.graph.add(triple) # add to context 1 graph = Graph(self.graph.store, c1) graph.add(triple) # add to context 2 graph = Graph(self.graph.store, c2) graph.add(triple) def testConjunction(self): self.addStuffInMultipleContexts() triple = (self.pizza, self.likes, self.pizza) # add to context 1 graph = Graph(self.graph.store, self.c1) graph.add(triple) self.assertEquals(len(self.graph), len(graph)) def testAdd(self): self.addStuff() def testRemove(self): self.addStuff() self.removeStuff() def testLenInOneContext(self): c1 = self.c1 # make sure context is empty self.graph.remove_context(self.get_context(c1)) graph = Graph(self.graph.store, c1) oldLen = len(self.graph) for i in range(0, 10): graph.add((BNode(), self.hates, self.hates)) self.assertEquals(len(graph), oldLen + 10) self.assertEquals(len(self.get_context(c1)), oldLen + 10) self.graph.remove_context(self.get_context(c1)) self.assertEquals(len(self.graph), oldLen) self.assertEquals(len(graph), 0) def testLenInMultipleContexts(self): oldLen = len(self.graph) self.addStuffInMultipleContexts() # addStuffInMultipleContexts is adding the same triple to # three different contexts. So it's only + 1 self.assertEquals(len(self.graph), oldLen + 1) graph = Graph(self.graph.store, self.c1) self.assertEquals(len(graph), oldLen + 1) def testRemoveInMultipleContexts(self): c1 = self.c1 c2 = self.c2 triple = (self.pizza, self.hates, self.tarek) # revenge! self.addStuffInMultipleContexts() # triple should be still in store after removing it from c1 + c2 self.assert_(triple in self.graph) graph = Graph(self.graph.store, c1) graph.remove(triple) self.assert_(triple in self.graph) graph = Graph(self.graph.store, c2) graph.remove(triple) self.assert_(triple in self.graph) self.graph.remove(triple) # now gone! self.assert_(triple not in self.graph) # add again and see if remove without context removes all triples! 
self.addStuffInMultipleContexts() self.graph.remove(triple) self.assert_(triple not in self.graph) def testContexts(self): triple = (self.pizza, self.hates, self.tarek) # revenge! self.addStuffInMultipleContexts() def cid(c): return c.identifier self.assert_(self.c1 in map(cid, self.graph.contexts())) self.assert_(self.c2 in map(cid, self.graph.contexts())) contextList = map(cid, list(self.graph.contexts(triple))) self.assert_(self.c1 in contextList) self.assert_(self.c2 in contextList) def testRemoveContext(self): c1 = self.c1 self.addStuffInMultipleContexts() self.assertEquals(len(Graph(self.graph.store, c1)), 1) self.assertEquals(len(self.get_context(c1)), 1) self.graph.remove_context(self.get_context(c1)) self.assert_(self.c1 not in self.graph.contexts()) def testRemoveAny(self): Any = None self.addStuffInMultipleContexts() self.graph.remove((Any, Any, Any)) self.assertEquals(len(self.graph), 0) def testTriples(self): tarek = self.tarek michel = self.michel bob = self.bob likes = self.likes hates = self.hates pizza = self.pizza cheese = self.cheese c1 = self.c1 asserte = self.assertEquals triples = self.graph.triples graph = self.graph c1graph = Graph(self.graph.store, c1) c1triples = c1graph.triples Any = None self.addStuff() # unbound subjects with context asserte(len(list(c1triples((Any, likes, pizza)))), 2) asserte(len(list(c1triples((Any, hates, pizza)))), 1) asserte(len(list(c1triples((Any, likes, cheese)))), 3) asserte(len(list(c1triples((Any, hates, cheese)))), 0) # unbound subjects without context, same results! 
asserte(len(list(triples((Any, likes, pizza)))), 2) asserte(len(list(triples((Any, hates, pizza)))), 1) asserte(len(list(triples((Any, likes, cheese)))), 3) asserte(len(list(triples((Any, hates, cheese)))), 0) # unbound objects with context asserte(len(list(c1triples((michel, likes, Any)))), 2) asserte(len(list(c1triples((tarek, likes, Any)))), 2) asserte(len(list(c1triples((bob, hates, Any)))), 2) asserte(len(list(c1triples((bob, likes, Any)))), 1) # unbound objects without context, same results! asserte(len(list(triples((michel, likes, Any)))), 2) asserte(len(list(triples((tarek, likes, Any)))), 2) asserte(len(list(triples((bob, hates, Any)))), 2) asserte(len(list(triples((bob, likes, Any)))), 1) # unbound predicates with context asserte(len(list(c1triples((michel, Any, cheese)))), 1) asserte(len(list(c1triples((tarek, Any, cheese)))), 1) asserte(len(list(c1triples((bob, Any, pizza)))), 1) asserte(len(list(c1triples((bob, Any, michel)))), 1) # unbound predicates without context, same results! asserte(len(list(triples((michel, Any, cheese)))), 1) asserte(len(list(triples((tarek, Any, cheese)))), 1) asserte(len(list(triples((bob, Any, pizza)))), 1) asserte(len(list(triples((bob, Any, michel)))), 1) # unbound subject, objects with context asserte(len(list(c1triples((Any, hates, Any)))), 2) asserte(len(list(c1triples((Any, likes, Any)))), 5) # unbound subject, objects without context, same results! asserte(len(list(triples((Any, hates, Any)))), 2) asserte(len(list(triples((Any, likes, Any)))), 5) # unbound predicates, objects with context asserte(len(list(c1triples((michel, Any, Any)))), 2) asserte(len(list(c1triples((bob, Any, Any)))), 3) asserte(len(list(c1triples((tarek, Any, Any)))), 2) # unbound predicates, objects without context, same results! 
asserte(len(list(triples((michel, Any, Any)))), 2) asserte(len(list(triples((bob, Any, Any)))), 3) asserte(len(list(triples((tarek, Any, Any)))), 2) # unbound subjects, predicates with context asserte(len(list(c1triples((Any, Any, pizza)))), 3) asserte(len(list(c1triples((Any, Any, cheese)))), 3) asserte(len(list(c1triples((Any, Any, michel)))), 1) # unbound subjects, predicates without context, same results! asserte(len(list(triples((Any, Any, pizza)))), 3) asserte(len(list(triples((Any, Any, cheese)))), 3) asserte(len(list(triples((Any, Any, michel)))), 1) # all unbound with context asserte(len(list(c1triples((Any, Any, Any)))), 7) # all unbound without context, same result! asserte(len(list(triples((Any, Any, Any)))), 7) for c in [graph, self.get_context(c1)]: # unbound subjects asserte(set(c.subjects(likes, pizza)), set((michel, tarek))) asserte(set(c.subjects(hates, pizza)), set((bob,))) asserte(set(c.subjects(likes, cheese)), set([tarek, bob, michel])) asserte(set(c.subjects(hates, cheese)), set()) # unbound objects asserte(set(c.objects(michel, likes)), set([cheese, pizza])) asserte(set(c.objects(tarek, likes)), set([cheese, pizza])) asserte(set(c.objects(bob, hates)), set([michel, pizza])) asserte(set(c.objects(bob, likes)), set([cheese])) # unbound predicates asserte(set(c.predicates(michel, cheese)), set([likes])) asserte(set(c.predicates(tarek, cheese)), set([likes])) asserte(set(c.predicates(bob, pizza)), set([hates])) asserte(set(c.predicates(bob, michel)), set([hates])) asserte(set(c.subject_objects(hates)), set([(bob, pizza), (bob, michel)])) asserte(set(c.subject_objects(likes)), set([(tarek, cheese), (michel, cheese), (michel, pizza), (bob, cheese), (tarek, pizza)])) asserte(set(c.predicate_objects(michel)), set([(likes, cheese), (likes, pizza)])) asserte(set(c.predicate_objects(bob)), set([(likes, cheese), (hates, pizza), (hates, michel)])) asserte(set(c.predicate_objects(tarek)), set([(likes, cheese), (likes, pizza)])) 
asserte(set(c.subject_predicates(pizza)), set([(bob, hates), (tarek, likes), (michel, likes)])) asserte(set(c.subject_predicates(cheese)), set([(bob, likes), (tarek, likes), (michel, likes)])) asserte(set(c.subject_predicates(michel)), set([(bob, hates)])) asserte(set(c), set([(bob, hates, michel), (bob, likes, cheese), (tarek, likes, pizza), (michel, likes, pizza), (michel, likes, cheese), (bob, hates, pizza), (tarek, likes, cheese)])) # remove stuff and make sure the graph is empty again self.removeStuff() asserte(len(list(c1triples((Any, Any, Any)))), 0) asserte(len(list(triples((Any, Any, Any)))), 0) # tested via ContextTestCase #class IOMemoryContextTestCase(ContextTestCase): # store = "IOMemory" # slowtest = False try: import persistent # If we can import persistent then test ZODB store class ZODBContextTestCase(ContextTestCase): store = "ZODB" slowtest = False except ImportError, e: print "Can not test ZODB store: %s" % e try: import MySQLdb # If we can import RDF then test Redland store class MySQLContextTestCase(ContextTestCase): store = "MySQL" slowtest = False except ImportError, e: "Can not test MySQL store: %s" % e try: import RDF # If we can import RDF then test Redland store class RedlandContextTestCase(ContextTestCase): store = "Redland" slowtest = False except ImportError, e: print "Can not test Redland store: %s" % e if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/test_datetime.pyc0000644000175000017500000000204211164177225016256 0ustar nachonachoÑò ¯Ic@sZddkZddklZlZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿN(tURIReftLiteraltTestRelativeBasecBseZd„Zd„ZRS(cCstddtdƒƒ|_dS(Ns2008-12-01T18:02:00Ztdatatypes)http://www.w3.org/2001/XMLSchema#dateTime(RRtx(tself((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datetime.pytsetUps cCs |i|i|ijtƒdS(N(t assertEqualsRtTrue(R((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datetime.pyt test_equality s(t__name__t __module__RR (((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datetime.pyRs 
t__main__(tunittesttrdflibRRtTestCaseRR tmain(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_datetime.pyts  rdflib-2.4.2/test/test_bdb_transaction.py0000644000175000017500000002164111165212345017453 0ustar nachonachoimport unittest, time from context import ContextTestCase from graph import GraphTestCase from rdflib import URIRef, Literal, ConjunctiveGraph from threading import Thread, currentThread from random import random from tempfile import mkdtemp def random_uri(): return URIRef(str(random())) def worker_add(performed_ops, graph, num_ops, input=[]): t1 = time.time() #print "thread: %s started" % currentThread().getName() for i in range(0, num_ops): #print "id: %s, thread: %s" % (i, currentThread().getName()) try: s = random_uri() p = random_uri() o = random_uri() graph.add((s,p,o)) performed_ops.append((s,p,o)) except Exception, e: print "could not perform op", e raise e print "%s triples, add time: %.4f, thread: %s" % (num_ops, (time.time() - t1), currentThread().getName()) def worker_remove(performed_ops, graph, num_ops, input=[]): t1 = time.time() #print "thread: %s started" % currentThread().getName() for i in range(0, num_ops): #print "id: %s, thread: %s" % (i, currentThread().getName()) try: try: s,p,o = input.pop() except: s = random_uri() p = random_uri() o = random_uri() graph.remove((s,p,o)) performed_ops.append((s,p,o)) except Exception, e: raise e #print "could not perform op", e print "remove time: %.4f, thread: %s" % ((time.time() - t1), currentThread().getName()) class TestBDBGraph(GraphTestCase): store_name = "BerkeleyDB" class TestBDBContext(ContextTestCase): store = "BerkeleyDB" class TestBDBTransactions(unittest.TestCase): slowtest = True def setUp(self): self.graph = ConjunctiveGraph(store="BerkeleyDB") self.path = mkdtemp() self.graph.open(self.path, create=True) def tearDown(self): self.graph.close() def get_context(self, identifier): assert isinstance(identifier, URIRef) or \ isinstance(identifier, BNode), type(identifier) return 
Graph(store=self.graph.store, identifier=identifier, namespace_manager=self) def __manyOpsManyThreads(self, worker, workers=10, triples=1000, input=[]): all_ops = [] pool = [] for i in range(0, workers): t = Thread(target=worker, args=(all_ops, self.graph, triples), kwargs={'input':input}) pool.append(t) t.start() for t in pool: t.join() return all_ops def testAddManyManyThreads(self): # TODO: sometimes this test leads to TypeError exceptions? w = 4 t = 1000 self.__manyOpsManyThreads(worker_add, workers=w, triples=t) #print "graph size after finish: ", len(self.graph) self.failUnless(len(self.graph) == w*t) def testRemove(self): ops = self.__manyOpsManyThreads(worker_add, workers=1, triples=10) self.__manyOpsManyThreads(worker_remove, workers=1, triples=7, input=ops) #print "graph size after finish: ", len(self.graph) self.failUnless(len(self.graph) == 3) def testRemoveAll(self): ops = self.__manyOpsManyThreads(worker_add, workers=1, triples=10) try: self.graph.remove((None, None, None)) except Exception, e: #print "Could not remove all: ", e raise e #print "graph size after finish: ", len(self.graph) self.failUnless(len(self.graph) == 0) def testReadWrite(self): triples = 1000 def _worker_transaction(): # self.graph.store.begin_txn() try: worker_add([], self.graph, triples) # self.graph.store.commit() except Exception, e: print "got exc: ", e # self.graph.store.rollback() def _read(): self.graph.store.begin_txn() try: res = [r for r in self.graph.triples((None, None, None))] self.graph.store.commit() except Exception, e: print "got exc: ", e self.graph.store.rollback() add_t = Thread(target=_worker_transaction) read_t = Thread(target=_read) add_t.start() time.sleep(0.1) read_t.start() add_t.join() read_t.join() def testAddUserTransaction(self): workers = 2 triples = 2000 def _worker(): t1 = time.time() success = False delay = 1 while not success: txn = self.graph.store.begin_txn() try: #print "thread: %s started, txn: %s" % (currentThread().getName(), txn) retry = 
False for i in range(0, triples): s = random_uri() p = random_uri() o = random_uri() self.graph.add((s,p,o)) #print "triple: %s, thread: %s" % (i, currentThread().getName()) except Exception, e: #print "could not complete transaction: ", e, delay self.graph.store.rollback() time.sleep(0.1*delay) delay = delay << 1 else: self.graph.store.commit() success = True print "%s triples add time: %.4f, thread: %s" % (triples, (time.time() - t1), currentThread().getName()) pool = [] for i in range(0, workers): t = Thread(target=_worker) pool.append(t) t.start() for t in pool: t.join() # print "graph size after finish: ", len(self.graph) self.failUnless(len(self.graph) == workers*triples) def testCloseCommit(self): triples = 1000 def _worker_transaction(): self.graph.store.begin_txn() try: worker_add([], self.graph, triples) self.graph.store.commit() except Exception, e: print "got exc: ", e self.graph.store.rollback() def _close(): self.graph.store.close(commit_pending_transaction=True) add_t = Thread(target=_worker_transaction) close_t = Thread(target=_close) add_t.start() time.sleep(0.5) close_t.start() add_t.join() print "add finished" close_t.join() print "close finished" #self.graph.open(self.path, create=False) #print "store length: ", len(self.graph) #self.failUnless() def testCloseOpen(self): # setUp opened self.graph.store.close() self.graph.store.open(self.path, create=False) if __name__ == "__main__": bdb_suite = unittest.TestSuite() bdb_suite.addTest(TestBDBTransactions('testAddManyManyThreads')) bdb_suite.addTest(TestBDBTransactions('testAddUserTransaction')) bdb_suite.addTest(TestBDBTransactions('testRemove')) bdb_suite.addTest(TestBDBTransactions('testRemoveAll')) bdb_suite.addTest(TestBDBTransactions('testCloseCommit')) bdb_suite.addTest(TestBDBTransactions('testCloseOpen')) bdb_suite.addTest(TestBDBTransactions('testReadWrite')) context_suite = unittest.TestSuite() context_suite.addTest(TestBDBContext('testAdd')) 
context_suite.addTest(TestBDBContext('testRemove')) context_suite.addTest(TestBDBContext('testLenInOneContext')) context_suite.addTest(TestBDBContext('testLenInMultipleContexts')) context_suite.addTest(TestBDBContext('testConjunction')) context_suite.addTest(TestBDBContext('testRemoveInMultipleContexts')) context_suite.addTest(TestBDBContext('testContexts')) context_suite.addTest(TestBDBContext('testRemoveContext')) context_suite.addTest(TestBDBContext('testRemoveAny')) context_suite.addTest(TestBDBContext('testTriples')) context_suite.addTest(TestBDBContext('testContexts')) graph_suite = unittest.TestSuite() graph_suite.addTest(TestBDBGraph('testAdd')) graph_suite.addTest(TestBDBGraph('testRemove')) graph_suite.addTest(TestBDBGraph('testTriples')) graph_suite.addTest(TestBDBGraph('testStatementNode')) graph_suite.addTest(TestBDBGraph('testGraphValue')) graph_suite.addTest(TestBDBGraph('testConnected')) unittest.TextTestRunner(verbosity=2).run(graph_suite) unittest.TextTestRunner(verbosity=2).run(context_suite) unittest.TextTestRunner(verbosity=2).run(bdb_suite) # unittest.main() rdflib-2.4.2/test/Sleepycat.pyc0000644000175000017500000000176011164176136015362 0ustar nachonachoÑò ¯Ic @sÊddklZy*ddklZdefd„ƒYZWn&ej oZeideƒnXddk l Z y*ddklZde fd„ƒYZ Wn&ej oZeideƒnXd S( iÿÿÿÿ(t GraphTestCase(t SleepycattSleepycatGraphTestCasecBseZdZRS(R(t__name__t __module__t store_name(((s9/Users/eikeon/rdflib-svn/branches/2.4.x/test/Sleepycat.pyRss Can not test Sleepycat store: %s(tContextTestCasetSleepycatStoreTestCasecBseZdZRS(R(RRtstore(((s9/Users/eikeon/rdflib-svn/branches/2.4.x/test/Sleepycat.pyRsN( t test.graphRtrdflib.store.SleepycatRRt ImportErrortet_loggertwarningt test.contextRR(((s9/Users/eikeon/rdflib-svn/branches/2.4.x/test/Sleepycat.pytsrdflib-2.4.2/test/sparql_parser_nestedbrackets.py0000644000175000017500000000103211153616026021210 0ustar nachonachofrom rdflib.sparql.bison import Parse # second query from here: # http://www.w3.org/TR/rdf-sparql-query/#GroupPatterns 
query = """ PREFIX foaf: SELECT ?name ?mbox WHERE { { ?x foaf:name ?name . } { ?x foaf:mbox ?mbox . } } """ correct = """{ [] }""" if __name__ == "__main__": p = Parse(query) tmp = p.query.whereClause.parsedGraphPattern if str(tmp) == correct: print "PASSED" rdflib-2.4.2/test/events.py0000644000175000017500000000303311153616026014560 0ustar nachonacho import unittest from rdflib import events class AddedEvent(events.Event): pass class RemovedEvent(events.Event): pass def subscribe_to(source, target): target.subscribe(AddedEvent, source._add_handler) target.subscribe(RemovedEvent, source._remove_handler) def subscribe_all(caches): for cache in caches: for other in caches: if other != cache: subscribe_to(cache, other) class Cache(events.Dispatcher): def __init__(self, data=None): if data is None: data = {} self._data = data self.subscribe(AddedEvent, self._add_handler) self.subscribe(RemovedEvent, self._remove_handler) def _add_handler(self, event): self._data[event.key] = event.value def _remove_handler(self, event): del self._data[event.key] def __getitem__(self, key): return self._data[key] def __setitem__(self, key, value): self.dispatch(AddedEvent(key=key, value=value)) def __delitem__(self, key): self.dispatch(RemovedEvent(key=key)) def has_key(self, key): return self._data.has_key(key) class EventTestCase(unittest.TestCase): def testEvents(self): c1 = Cache() c2 = Cache() c3 = Cache() subscribe_all([c1,c2,c3]) c1['bob'] = 'uncle' assert c2['bob'] == 'uncle' assert c3['bob'] == 'uncle' del c3['bob'] assert c1.has_key('bob') == False assert c2.has_key('bob') == False if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/test_empty_xml_base.pyc0000644000175000017500000000614411164177225017501 0ustar nachonachoÑò ¯Ic@sËdZddklZlZlZlZlZddklZddkZedƒZ dZ dZ edƒZ ed ƒZ d eifd „ƒYZd eifd „ƒYZedjoeiƒndS(s‹ Test for empty xml:base values xml:base='' should resolve to the given publicID per XML Base specification and RDF/XML dependence on it 
iÿÿÿÿ(tConjunctiveGraphtLiteraltURIReft NamespacetRDF(tStringIONshttp://xmlns.com/foaf/0.1/s  s shttp://example.com/shttp://example.com/foo/bart TestEmptyBasecBseZd„Zd„ZRS(cCs,tƒ|_|iittƒdtƒdS(NtpublicID(RtgraphtparseRt test_datatbaseUri(tself((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pytsetUp's cCsO|it|iƒdjdƒ|ittitif|ijdtƒdS(Nis3There should be at least one statement in the graphs/There should be a triple with %s as the subject(t failUnlesstlenRR RttypetFOAFtDocument(R ((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyt test_base_ref+s(t__name__t __module__R R(((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyR&s tTestRelativeBasecBseZd„Zd„ZRS(cCs,tƒ|_|iittƒdtƒdS(NR(RRR Rt test_data2tbaseUri2(R ((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyR 0s cCs[|it|iƒdjdƒtdƒ}|i|titif|ijd|ƒdS(Nis3There should be at least one statement in the graphshttp://example.com/bazs/There should be a triple with %s as the subject(RRRRRRRR(R t resolvedBase((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyR4s (RRR R(((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyR/s t__main__(t__doc__trdflibRRRRRRtunittestRR RR RtTestCaseRRRtmain(((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_empty_xml_base.pyts(       rdflib-2.4.2/test/IdentifierEquality.py0000644000175000017500000000520111153616026017053 0ustar nachonachoimport unittest from rdflib import URIRef, BNode, Literal from rdflib.syntax.parsers.RDFXMLHandler import CORE_SYNTAX_TERMS from rdflib.Graph import Graph from rdflib import RDF """ Ah... it's coming back to me... [6:32p] eikeon: think it's so transitivity holds... [6:32p] eikeon: if a==b and b==c then a should == c [6:32p] eikeon: "foo"==Literal("foo") [6:33p] eikeon: We don't want URIRef("foo")==Literal("foo") [6:33p] eikeon: But if we have URIRef("foo")=="foo" then it implies it. 
[6:33p] chimezie: yes, definately not the other RDFLib 'typed' RDF (and N3) terms [6:34p] eikeon: Why do you need URIRef("foo")=="foo" ? [6:34p] chimezie: i'm just wondering if a URI and a string with the same lexical value, are by definition 'different' [6:35p] eikeon: Think so, actually. Think of trying to serialize some triples. [6:36p] eikeon: If they are the same you'd serialize them the same, no? [6:36p] chimezie: I guess I was thinking of a 'string' in a native datatype sense, not in the RDF sense (where they would be distinctly different) [6:37p] eikeon: We should try and brain dump some of this... [6:37p] eikeon: it look a fairly long time to work out. [6:37p] eikeon: But think we finally landed in the right spot. [6:38p] eikeon: I know many of the backends break if URIRef("foo")==Literal("foo") [6:39p] eikeon: And if we want "foo"==Literal("foo") --- then we really can't have URIRef("foo") also == "foo" """ class IdentifierEquality(unittest.TestCase): def setUp(self): self.uriref = URIRef("http://example.org/") self.bnode = BNode() self.literal = Literal("http://example.org/") self.python_literal = u"http://example.org/" self.python_literal_2 = u"foo" def testA(self): self.assertEquals(self.uriref==self.literal, False) def testB(self): self.assertEquals(self.literal==self.uriref, False) def testC(self): self.assertEquals(self.uriref==self.python_literal, False) def testD(self): self.assertEquals(self.python_literal==self.uriref, False) def testE(self): self.assertEquals(self.literal==self.python_literal, True) def testF(self): self.assertEquals(self.python_literal==self.literal, True) def testG(self): self.assertEquals("foo" in CORE_SYNTAX_TERMS, False) def testH(self): self.assertEquals(URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#RDF") in CORE_SYNTAX_TERMS, True) def testI(self): g = Graph() g.add((self.uriref, RDF.value, self.literal)) g.add((self.uriref, RDF.value, self.uriref)) self.assertEqual(len(g), 2) if __name__ == "__main__": 
unittest.main() rdflib-2.4.2/test/bdb_optimized.pyc0000644000175000017500000000454711164176136016252 0ustar nachonachoÑò ¯Ic@s&ddkZddklZddklZddklZddklZdefd„ƒYZ defd „ƒYZ d dd „ƒYZ e d joŠei ƒZei ƒZeie d ƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒei ƒZeie d ƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeie dƒƒeiddƒieƒeiddƒieƒndS(iÿÿÿÿN(tConjunctiveGraph(tContextTestCase(t GraphTestCase(tmkdtempt TestBDBGraphcBseZdZRS(t BDBOptimized(t__name__t __module__t store_name(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pyRstTestBDBContextcBseZdZRS(R(RRtstore(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pyR stTestBDBOptimizedcBseZd„Zd„ZRS(cCs;tddƒ|_tƒ|_|ii|idtƒdS(NR Rtcreate(RtgraphRtpathtopentTrue(tself((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pytsetUps cCs|iiƒdS(N(R tclose(R((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pyttearDowns(RRRR(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pyR s t__main__ttestAddt testRemovettestLenInOneContextttestLenInMultipleContextsttestConjunctionttestRemoveInMultipleContextst testContextsttestRemoveContextt testRemoveAnyt testTriplesttestStatementNodettestGraphValuet testConnectedt verbosityi((tunittestt rdflib.GraphRtcontextRR RttempfileRRR R Rt TestSuitet bdb_suitet context_suitetaddTestt graph_suitetTextTestRunnertrun(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/bdb_optimized.pyts:     rdflib-2.4.2/test/leaves.pyc0000644000175000017500000000207111164176136014704 0ustar nachonachoÑò ¯Ic @sqddkZddkZdZdZddklZd„Zedjo#eiddeƒd ei ƒndS( iÿÿÿÿNsÎ @prefix foaf: . @prefix : . :a foaf:knows :b . :a foaf:knows :c . :a foaf:knows :d . :b foaf:knows :a . :b foaf:knows :c . :c foaf:knows :a . s PREFIX foaf: select distinct ?person where { ?person foaf:knows ?a . ?person foaf:knows ?b . filter (?a != ?b) . 
} (tStringIOcCs"tddddtidtƒƒS(Ns leaves.txttpackagetrdflibt optionflagstglobs(t DocFileSuitetdoctesttELLIPSIStlocals(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/leaves.pyt test_leaves#s  t__main__s leaves.txtRR( tunittestRtdatatqueryRR t__name__ttestfiletglobalsR(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/leaves.pyts     rdflib-2.4.2/test/leaves.txt0000644000175000017500000000430311153616026014723 0ustar nachonacho=========================================== Test named graph selects and leaf selects =========================================== >>> from rdflib import ConjunctiveGraph as CG, Namespace >>> from rdflib.Graph import Graph >>> print data @prefix foaf: . @prefix : . :a foaf:knows :b . :a foaf:knows :c . :a foaf:knows :d . :b foaf:knows :a . :b foaf:knows :c . :c foaf:knows :a ... >>> print query PREFIX foaf: ... select distinct ?person where { ?person foaf:knows ?a . ?person foaf:knows ?b . filter (?a != ?b) . }... >>> from rdflib import RDF >>> graph = Graph(identifier=RDF.RDFNS) >>> graph.parse(StringIO(data), format='n3') )> >>> print graph.query(query).serialize('json') { "head" : { "vars" : [ "person" ] }, "results" : { "ordered" : false, "distinct" : true, "bindings" : [ { "person" : {"type": "uri", "value" : "tag:example.org,2007;stuff/b"} }, { "person" : {"type": "uri", "value" : "tag:example.org,2007;stuff/a"} } ] } }... >>> from rdflib.URIRef import URIRef >>> graph = Graph(identifier=URIRef('http://bobby')).parse(StringIO(data), format='n3') >>> print graph.query(query).serialize('json') { "head" : { "vars" : [ "person" ] }, "results" : { "ordered" : false, "distinct" : true, "bindings" : [ { "person" : {"type": "uri", "value" : "tag:example.org,2007;stuff/b"} }, { "person" : {"type": "uri", "value" : "tag:example.org,2007;stuff/a"} } ] } }... 
rdflib-2.4.2/test/test_sparql_advanced.py0000644000175000017500000000323611153616026017447 0ustar nachonachoimport unittest import doctest from rdflib import RDF,RDFS, Namespace from rdflib import Variable from rdflib.sparql import DESCRIBE from rdflib.Graph import Graph from cStringIO import StringIO testData=""" @prefix rdf: . @prefix rdfs: . @prefix : . :foo :relatedTo [ a rdfs:Class ]; :parentOf ( [ a rdfs:Class ] ). :bar :relatedTo [ a rdfs:Resource ]; :parentOf ( [ a rdfs:Resource ] ). ( [ a rdfs:Resource ] ) :childOf :bar. ( [ a rdfs:Class ] ) :childOf :foo. """ testData2=""" @prefix foaf: . _:a foaf:name "Alice" . _:a foaf:mbox . """ testGraph=Graph().parse(StringIO(testData2), format='n3') FOAF =Namespace('http://xmlns.com/foaf/0.1/') VCARD=Namespace('http://www.w3.org/2001/vcard-rdf/3.0#') def describeOverride(terms,bindings,graph): g=Graph() for term in terms: if isinstance(term,Variable) and term not in bindings: continue else: term=bindings.get(term,term) for s,p,o in graph.triples((term,FOAF.mbox,None)): g.add((s,p,o)) return g namespaces={u'rdfs' : RDF.RDFNS, u'rdf' : RDFS.RDFSNS, u'foaf' : FOAF, u'vcard': VCARD, u'ex' : Namespace('http://example.org/person#') } for prefix,uri in namespaces.items(): testGraph.namespace_manager.bind(prefix, uri, override=False) if __name__ == "__main__": doctest.testfile("test_sparql_advanced.txt", globs=globals(), optionflags = doctest.ELLIPSIS) rdflib-2.4.2/test/n3.pyc0000644000175000017500000000663511164176136013757 0ustar nachonachoÑò ¯Ic@sjddkTdZddkZddklZlZdeifd„ƒYZedjoei ƒndS(iÿÿÿÿ(t*s # Definitions of terms describing the n3 model # @keywords a. @prefix n3: <#>. @prefix log: . @prefix rdf: . @prefix rdfs: . @prefix : <#> . @forAll :s, :p, :x, :y, :z. n3:Statement a rdf:Class . n3:StatementSet a rdf:Class . n3:includes a rdfs:Property . # Cf rdf:li n3:predicate a rdf:Property; rdfs:domain n3:statement . n3:subject a rdf:Property; rdfs:domain n3:statement . n3:object a rdf:Property; rdfs:domain n3:statement . 
n3:context a rdf:Property; rdfs:domain n3:statement; rdfs:range n3:StatementSet . ########### Rules { :x :p :y . } log:means { [ n3:subject :x; n3:predicate :p; n3:object :y ] a log:Truth}. # Needs more thought ... ideally, we have the implcit AND rules of # juxtaposition (introduction and elimination) { { { :x n3:includes :s. } log:implies { :y n3:includes :s. } . } forall :s1 . } log:implies { :x log:implies :y } . { { { :x n3:includes :s. } log:implies { :y n3:includes :s. } . } forall :s1 } log:implies { :x log:implies :y } . # I think n3:includes has to be axiomatic builtin. - unless you go to syntax description. # syntax.n3? N(tGraphtConjunctiveGrapht N3TestCasecBs5eZd„Zd„Zd„Zd„Zd„ZRS(cCsdS(N((tself((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pytsetUpBscCsdS(N((R((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyttearDownEscCs5d}tƒ}|it|it|ƒddƒdS(s• Test that the n3 parser throws an Exception when using the identifier ":foo.txt", as this is not valid as per the rdf spec. s9 @prefix : . :foo.txt :p :q . 
tformattn3N(Rt assertRaisest ExceptiontparsetStringInputSource(Rtinputtg((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyt testFileNameHs  cCs tƒ}|ittƒddƒd}x5|D]-\}}}t|tƒo|d7}q/q/W|i|dƒ|itt|i ƒƒƒdƒ|i ƒdS(NRRiiii ( RR R R t isinstanceRt assertEqualstlentlisttcontextstclose(RRtitstpto((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyt testModel[s "cCs tƒ}|idddƒdS(NsGhttp://groups.csail.mit.edu/dig/2005/09/rein/examples/troop42-policy.n3RR(RR (RR((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyt testParsehs (t__name__t __module__RRRRR(((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyR@s     t__main__( trdflibR tunittestt rdflib.GraphRRtTestCaseRRtmain(((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/n3.pyts 6 , rdflib-2.4.2/test/trix.py0000755000175000017500000000145111153616026014247 0ustar nachonacho#!/usr/bin/env python import sys sys.path[0:0]+=[".."] import rdflib import unittest class TriXTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testAperture(self): g=rdflib.Graph() g.parse("trix/aperture.trix",format="trix") c=list(g.contexts()) #print list(g.contexts()) t=sum(map(lambda x: len(g.get_context(x)),g.contexts())) self.assertEquals(t,24) self.assertEquals(len(c),4) #print "Parsed %d triples"%t def testSpec(self): g=rdflib.Graph() g.parse("trix/nokia_example.trix",format="trix") #print "Parsed %d triples"%len(g) if __name__=='__main__': unittest.main() rdflib-2.4.2/test/test_sparql_json_results.py0000644000175000017500000000633311153616026020435 0ustar nachonachofrom rdflib import ConjunctiveGraph from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . a foaf:Person; foaf:name "Alice"; foaf:knows . a foaf:Person; foaf:name "Bob" . """ PROLOGUE = """ PREFIX foaf: """ test_material = {} test_material['optional'] = (PROLOGUE+""" SELECT ?name ?x ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . 
} } """, """"name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"}, "x" : {"type": "uri", "value" : "http://example.org/bob"} }""" ) test_material['select_vars'] = (PROLOGUE+""" SELECT ?name ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } }""", """"vars" : [ "name", "friend" ]""" ) test_material['wildcard'] = (PROLOGUE+""" SELECT * WHERE { ?x foaf:name ?name . } """, """"name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"}, "x" : {"type": "uri", "value" : "http://example.org/bob"} }""" ) test_material['wildcard_vars'] = (PROLOGUE+""" SELECT * WHERE { ?x foaf:name ?name . } """, """"vars" : [ "name", "x" ]""" ) test_material['union'] = (PROLOGUE+""" SELECT DISTINCT ?name WHERE { { foaf:name ?name . } UNION { foaf:name ?name . } } """, """{ "name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"} }, { "name" : {"type": "literal", "xml:lang" : "None", "value" : "Alice"} }""" ) test_material['union3'] = (PROLOGUE+""" SELECT DISTINCT ?name WHERE { { foaf:name ?name . } UNION { foaf:name ?name . } UNION { foaf:name ?name . 
} } """, '"Alice"' ) def make_method(testname): def test(self): query, correct = test_material[testname] self._query_result_contains(query, correct) test.__name__ = 'test%s' % testname.title() return test class TestSparqlJsonResults(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.parse(StringIO(test_data), format="n3") def _query_result_contains(self, query, correct): results = self.graph.query(query) result_json = results.serialize(format='json') self.failUnless(result_json.find(correct) >= 0, "Expected:\n %s \n- to contain:\n%s" % (result_json, correct)) testOptional = make_method('optional') testWildcard = make_method('wildcard') testUnion = make_method('union') testUnion3 = make_method('union3') testSelectVars = make_method('select_vars') testWildcardVars = make_method('wildcard_vars') if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/n3Test.py0000755000175000017500000000262311153616026014443 0ustar nachonacho#!/usr/bin/env python2.4 import os, traceback, sys, unittest #sys.path[:0]=[".."] import rdflib def crapCompare(g1,g2): "A really crappy way to 'check' if two graphs are equal. It ignores blank nodes completely" if len(g1)!=len(g2): raise Exception("Graphs dont have same length") for t in g1: if not isinstance(t[0],rdflib.BNode): s=t[0] else: s=None if not isinstance(t[2],rdflib.BNode): o=t[2] else: o=None if not (s,t[1],o) in g2: e="(%s,%s,%s) is not in both graphs!"%(s,t[1],o) raise Exception, e def test(f, prt=False): g=rdflib.ConjunctiveGraph() if f.endswith('rdf'): g.parse(f) else: g.parse(f, format='n3') if prt: for t in g: print t print "========================================\nParsed OK!" 
s=g.serialize(format='n3') if prt: print s g2=rdflib.ConjunctiveGraph() g2.parse(rdflib.StringInputSource(s),format='n3') if prt: print g2.serialize() crapCompare(g,g2) if len(sys.argv)>1: test(sys.argv[1], True) sys.exit() class TestN3Writing(unittest.TestCase): def testWriting(self): for f in os.listdir('test/n3'): if f!='.svn': test("test/n3/"+f) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/bdb_optimized.py0000644000175000017500000000340311153616026016070 0ustar nachonachoimport unittest from rdflib.Graph import ConjunctiveGraph from context import ContextTestCase from graph import GraphTestCase from tempfile import mkdtemp class TestBDBGraph(GraphTestCase): store_name = "BDBOptimized" class TestBDBContext(ContextTestCase): store = "BDBOptimized" class TestBDBOptimized: def setUp(self): self.graph = ConjunctiveGraph(store="BDBOptimized") self.path = mkdtemp() self.graph.open(self.path, create=True) def tearDown(self): self.graph.close() if __name__ == "__main__": bdb_suite = unittest.TestSuite() context_suite = unittest.TestSuite() context_suite.addTest(TestBDBContext('testAdd')) context_suite.addTest(TestBDBContext('testRemove')) context_suite.addTest(TestBDBContext('testLenInOneContext')) context_suite.addTest(TestBDBContext('testLenInMultipleContexts')) context_suite.addTest(TestBDBContext('testConjunction')) context_suite.addTest(TestBDBContext('testRemoveInMultipleContexts')) context_suite.addTest(TestBDBContext('testContexts')) context_suite.addTest(TestBDBContext('testRemoveContext')) context_suite.addTest(TestBDBContext('testRemoveAny')) context_suite.addTest(TestBDBContext('testTriples')) graph_suite = unittest.TestSuite() graph_suite.addTest(TestBDBGraph('testAdd')) graph_suite.addTest(TestBDBGraph('testRemove')) graph_suite.addTest(TestBDBGraph('testTriples')) graph_suite.addTest(TestBDBGraph('testStatementNode')) graph_suite.addTest(TestBDBGraph('testGraphValue')) graph_suite.addTest(TestBDBGraph('testConnected')) 
unittest.TextTestRunner(verbosity=2).run(graph_suite) unittest.TextTestRunner(verbosity=2).run(context_suite) # unittest.main() rdflib-2.4.2/test/nt.pyc0000644000175000017500000000216511164176136014052 0ustar nachonachoÑò ¯Ic@s^ddkZddkTddklZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿN(t*(tGrapht NTTestCasecBs#eZd„Zd„Zd„ZRS(cCsdS(N((tself((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/nt.pytsetUpscCsdS(N((R((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/nt.pyttearDown scCs tƒ}|idddƒdS(NsThttp://www.w3.org/2000/10/rdf-tests/rdfcore/rdfms-empty-property-elements/test002.nttformattnt(Rtload(Rtg((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/nt.pyt testModels (t__name__t __module__RRR (((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/nt.pyRs  t__main__(tunittesttrdflibt rdflib.GraphRtTestCaseRR tmain(((s2/Users/eikeon/rdflib-svn/branches/2.4.x/test/nt.pyts   rdflib-2.4.2/test/sparql_regex.pyc0000644000175000017500000000317411164176137016127 0ustar nachonachoÑò ¯Ic@s†ddklZlZddklZddklZddkZdZdZdei fd„ƒYZ e d joei ƒndS( iÿÿÿÿ(tConjunctiveGraphtplugin(tStore(tStringIONsP @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . s‡ PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . 
FILTER regex(?name, "a", "i") } t TestRegexcBseZd„ZRS(cCs¡ttidtƒƒƒ}|ittƒddƒ|itƒ}|i t g}|D]3}d|djpd|djo ||qWqW~ƒdjƒdS(NtIOMemorytformattn3taitAi( RRtgetRtparseRt test_datatqueryt test_queryt failUnlesstlen(tselftgraphtresultst_[1]R((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_regex.pyt testRegexs(t__name__t __module__R(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_regex.pyRst__main__( trdflibRRt rdflib.storeRRtunittestR RtTestCaseRRtmain(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_regex.pyts   rdflib-2.4.2/test/test_sparql_advanced.pyc0000644000175000017500000000430211164177226017613 0ustar nachonachoÑò ¯Ic@s^ddkZddkZddklZlZlZddklZddklZddk l Z ddk l Z dZ dZe ƒie eƒd d ƒZed ƒZed ƒZd „Zheid6eid6ed6ed6edƒd6Zx3eiƒD]%\ZZeiieedeƒqWedjo#eiddeƒdei ƒndS(iÿÿÿÿN(tRDFtRDFSt Namespace(tVariable(tDESCRIBE(tGraph(tStringIOs @prefix rdf: . @prefix rdfs: . @prefix : . :foo :relatedTo [ a rdfs:Class ]; :parentOf ( [ a rdfs:Class ] ). :bar :relatedTo [ a rdfs:Resource ]; :parentOf ( [ a rdfs:Resource ] ). ( [ a rdfs:Resource ] ) :childOf :bar. ( [ a rdfs:Class ] ) :childOf :foo. s~ @prefix foaf: . _:a foaf:name "Alice" . _:a foaf:mbox . 
tformattn3shttp://xmlns.com/foaf/0.1/s%http://www.w3.org/2001/vcard-rdf/3.0#cCs™tƒ}x‰|D]}t|tƒo||joqn|i||ƒ}xB|i|tidfƒD]%\}}}|i|||fƒqhWqW|S(N( Rt isinstanceRtgetttriplestFOAFtmboxtNonetadd(ttermstbindingstgraphtgttermtstpto((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_advanced.pytdescribeOverride#s urdfsurdfufoafuvcardshttp://example.org/person#uextoverridet__main__stest_sparql_advanced.txttglobst optionflags(!tunittesttdoctesttrdflibRRRRt rdflib.sparqlRt rdflib.GraphRt cStringIORttestDatat testData2tparset testGraphR tVCARDRtRDFNStRDFSNSt namespacestitemstprefixturitnamespace_managertbindtFalset__name__ttestfiletglobalstELLIPSIS(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_advanced.pyts0          rdflib-2.4.2/test/test_sparql_advanced.txt0000644000175000017500000000604211153616026017634 0ustar nachonacho================================================================ Advanced SPARQL: CONSTRUCT, user-defined DESCRIBE and extensions ================================================================ I. CONSTRUCT support CONSTRUCT queries are supported and differ from SELECT/ASK queries primarily in that the rdflib.sparql.SPARQLQueryResultQueryResult object returned will forward the format argument to the serialize method of the 'constructed' graph. The 'result' attribute will be set to an instance of this constructured (in-memory) Graph. >>> testGraph.query( ... "CONSTRUCT { ex:Alice vcard:FN ?name }\ ... WHERE { ?x foaf:name ?name }", ... initNs=namespaces, ... ).serialize(format='nt') ' "Alice".\n' The result of a CONSTRUCT can be parsed into a graph >>> rt=testGraph.query( ... "CONSTRUCT { ex:Alice vcard:FN ?name }\ ... WHERE { ?x foaf:name ?name }", ... initNs=namespaces, ... ).serialize(format='xml') >>> [ (s.n3(), ... p.n3(), ... o.n3()) for s,p,o in Graph().parse(StringIO(rt))] [(u'', u'', u'"Alice"')] II. 
User-defined DESCRIBE Describe support is implemented by allowing the user to pass in a method which takes as arguments: the terms listed in the DESCRIBE expression, a solution binding, and the underlying dataset Graph (a ConjunctiveGraph/Graph) and returns an description graph (a Graph instance). The default describe method will simply return all incoming and outgoing statements as the resulting graph: =============================================================================== def describe(terms,bindings,graph): """ Default DESCRIBE returns all incomming and outgoing statements about the given terms """ from rdflib.sparql.sparqlOperators import getValue g=Graph() terms=[getValue(i)(bindings) for i in terms] for s,p,o in graph.triples_choices((terms,None,None)): g.add((s,p,o)) for s,p,o in graph.triples_choices((None,None,terms)): g.add((s,p,o)) return g =============================================================================== >>> rt=testGraph.query( ... "DESCRIBE ?x WHERE { ?x foaf:name ?name }", ... initNs=namespaces ... ).result >>> len(rt) 2 A user-defined method is specified by passing it in as an extension function bound to >>> rt=testGraph.query( ... "DESCRIBE ?x WHERE { ?x foaf:name ?name }", ... initNs=namespaces, ... extensionFunctions={DESCRIBE:describeOverride} ... ).result >>> list(rt) [(rdflib.BNode('.........'), rdflib.URIRef('http://xmlns.com/foaf/0.1/mbox'), rdflib.URIRef('mailto:alice@example.org'))] >>> [ (s.n3(), ... p.n3(), ... o.n3()) for s,p,o in rt] [(u'_:.........', u'', u'')] rdflib-2.4.2/test/test_sparql_literal_patterns.pyc0000644000175000017500000000456211164177226021432 0ustar nachonachoÑò ¯Ic @s„ddklZlZlZddklZddklZdZeƒZei eeƒddƒdZ edƒZ e d Z d e d e gfd e d e gfde de gfde de gfde de gfde de gfde de gfgZ d„Zd„ZedjoVddklZe eedƒ\ZZZeieƒZeeeeieƒndS(iÿÿÿÿ(tConjunctiveGraphtURIReftLiteral(tStringIO(tdates‚ @prefix rdfs: . @prefix xsd: . @prefix : . 
:plain "plain"; :integer 1; :float 1.1; :string "string"^^xsd:string; :date "2007-04-28"^^xsd:date; rdfs:label "Thing"@en, "Sak"@sv . tformattn3s¤ PREFIX rdfs: PREFIX xsd: PREFIX t: shttp://example.org/things! SELECT ?uri WHERE { ?uri %s . } tplainst:plain "plain"tintegers t:integer 1tfloats t:float 1.1t langlabel_ensrdfs:label "Thing"@ent langlabel_svsrdfs:label "Sak"@svtstringst:string "string"^^xsd:stringRst:date "2007-04-28"^^xsd:datecCs+||jptd||||f‚dS(Ns0Failed test "%s": %s , expected %s and got %s (tAssertionError(tnametsparqltrealtexpected((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_literal_patterns.pyt assert_equal,sccsDx=tD]5\}}}ti|ƒ}t|||i|fVqWdS(N(t TEST_DATAtgraphtqueryRtselected(RRRtres((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_literal_patterns.pyttest_generator0st__main__(targviN(trdflibRRRRtdatetimeRttestRdfRtloadtPROLOGUEtthingtSPARQLRRRt__name__tsysRtintRRRRRR(((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_literal_patterns.pyts,     rdflib-2.4.2/test/sparql_order_by.py0000644000175000017500000000174711153616026016455 0ustar nachonachofrom rdflib import ConjunctiveGraph, plugin, Literal from rdflib.store import Store from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . """ test_query = """ PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . 
} ORDER BY ?name """ class TestOrderBy(unittest.TestCase): def testOrderBy(self): graph = ConjunctiveGraph(plugin.get('IOMemory',Store)()) graph.parse(StringIO(test_data), format="n3") results = graph.query(test_query) self.failUnless(False not in [r[0] == a for r, a in zip(results, ['Alice', 'Bob', 'Charlie', 'Dave'])]) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/graph.pyc0000644000175000017500000001477311164176136014542 0ustar nachonachoÑò ¯Ic@sddkZddklZddklZlZlZlZddkl Z dei fd„ƒYZ y&ddk Z de fd„ƒYZ Wnej oZd eGHnXy&ddkZd e fd „ƒYZWnej oZd eGHnXed joeiƒndS(iÿÿÿÿN(tmkdtemp(tURIReftBNodetLiteraltRDF(tGrapht GraphTestCasecBsteZdZd ZeZd„Zd„Zd„Z d„Z d„Z d„Z d„Z d„Zd „Zd „ZRS( tdefaultcCs±td|iƒ|_tƒ}|ip||_|ii|iƒtdƒ|_tdƒ|_tdƒ|_ tdƒ|_ tdƒ|_ tdƒ|_ tdƒ|_ dS( Ntstoreumichelutarekubobulikesuhatesupizzaucheese(Rt store_nametgraphRtpathtopenRtmichelttarektbobtlikesthatestpizzatcheese(tselft a_tmp_dir((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pytsetUp s cCs|iiƒdS(N(R tclose(R((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyttearDownscCsò|i}|i}|i}|i}|i}|i}|i}|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒdS(N( RR RRRRRR tadd(RRR RRRRR((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pytaddStuffs       cCsò|i}|i}|i}|i}|i}|i}|i}|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒ|ii|||fƒdS(N( RR RRRRRR tremove(RRR RRRRR((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyt removeStuff/s       cCs|iƒdS(N(R(R((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyttestAdd@scCs|iƒ|iƒdS(N(RR(R((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyt testRemoveCs c Csâ|i}|i}|i}|i}|i}|i}|i}|i}|ii } d} |i ƒ|t t | | ||fƒƒƒdƒ|t t | | ||fƒƒƒdƒ|t t | | ||fƒƒƒdƒ|t t | | ||fƒƒƒdƒ|t t | ||| fƒƒƒdƒ|t t | ||| fƒƒƒdƒ|t t | ||| fƒƒƒdƒ|t t | ||| fƒƒƒdƒ|t t | || |fƒƒƒdƒ|t t | || |fƒƒƒdƒ|t t | || |fƒƒƒdƒ|t t | || |fƒƒƒdƒ|t t | | || fƒƒƒdƒ|t t | | || fƒƒƒdƒ|t t | || | fƒƒƒdƒ|t t | || | fƒƒƒdƒ|t t | || | fƒƒƒdƒ|t t | | | |fƒƒƒdƒ|t t | | | |fƒƒƒdƒ|t t | 
| | |fƒƒƒdƒ|t t | | | | fƒƒƒdƒ|iƒ|t t | | | | fƒƒƒdƒdS(Niiiiii(RR RRRRRt assertEqualsR ttriplestNoneRtlentlistR( RRR RRRRRtasserteR tAny((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyt testTriplesGsD          ((((((((((((((((((((( cCs³|i}ddkl}tdƒ}tdƒ}||i|i|if|ƒ}|i|ti |fƒ|i ||i |ti ƒƒ|i ||i dti d|ƒƒdS(Niÿÿÿÿ(t Statementshttp://example.org/foo#cshttp://example.org/foo#rt predicatetobject( R trdflib.StatementR'RR RRRRtvalueR(RR R'tctrts((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyttestStatementNode{s   !c Csvddkl}|i}tdƒ}tdƒ}tdƒ}tdƒ}tƒ}|i|ti|fƒ|i|ti|fƒ|i|ti|fƒtƒ}|i|ti|fƒ|i|ti|fƒ|i|ti|fƒ|d|id|ƒ} |d|id|ƒ} |i| ti| fƒ|i| ƒ} |i | | ƒ|i | ti| fƒdS( Niÿÿÿÿ(t GraphValuetaliceRRRRR ( t rdflib.GraphR0R RRRRR+RRR( RR0R R1RRRtg1tg2tgv1tgv2tv((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyttestGraphValue†s(       cCst|i}|iƒ|it|iƒƒtdƒ}tdƒ}|i||i|fƒ|it|iƒƒdS(Ntjeroent unconnected( R RRtTruet connectedRRRtFalse(RR R9R:((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyt testConnected¤s    N(t__name__t __module__R R!R R;tslowtestRRRRRRR&R/R8R>(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyRs       4 tZODBGraphTestCasecBseZdZeZRS(tZODB(R?R@R R=RA(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyRB¸ssCan not test ZODB store: %stRedLandTestCasecBseZdZeZRS(tRedland(R?R@R R=RA(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyRDÂssCan not test Redland store: %st__main__(tunittestttempfileRtrdflibRRRRR2RtTestCaseRt persistentRBt ImportErrorteRERDR?tmain(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/graph.pyts "­   rdflib-2.4.2/test/__init__.pyc0000644000175000017500000000021711164176136015164 0ustar nachonachoÑò ¯Ic@sdS(N((((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/__init__.pytsrdflib-2.4.2/test/rdf.py0000644000175000017500000000310511153616026014027 0ustar nachonachoimport unittest from rdflib import * from rdflib.Graph import Graph from rdflib import RDF from rdflib.StringInputSource import StringInputSource FOAF = 
Namespace("http://xmlns.com/foaf/0.1/") rdfxml = """\ Donna Fales donna """ class RDFTestCase(unittest.TestCase): backend = 'default' path = 'store' def setUp(self): self.store = Graph(store=self.backend) self.store.open(self.path) self.store.bind("dc", "http://http://purl.org/dc/elements/1.1/") self.store.bind("foaf", "http://xmlns.com/foaf/0.1/") def tearDown(self): self.store.close() def addDonna(self): self.donna = donna = BNode() self.store.add((donna, RDF.type, FOAF["Person"])) self.store.add((donna, FOAF["nick"], Literal("donna"))) self.store.add((donna, FOAF["name"], Literal("Donna Fales"))) def testRDFXML(self): self.addDonna() g = Graph() g.parse(StringInputSource(self.store.serialize(format="pretty-xml"))) self.assertEquals(self.store.isomorphic(g), True) def test_suite(): return unittest.makeSuite(RDFTestCase) if __name__ == '__main__': unittest.main(defaultTest='test_suite') rdflib-2.4.2/test/seq.py0000644000175000017500000000274111153616026014051 0ustar nachonachoimport unittest from rdflib import * from rdflib.Graph import Graph from rdflib.StringInputSource import StringInputSource class SeqTestCase(unittest.TestCase): backend = 'default' path = 'store' def setUp(self): store = self.store = Graph(store=self.backend) store.open(self.path) store.parse(StringInputSource(s)) def tearDown(self): self.store.close() def testSeq(self): items = self.store.seq(URIRef("http://example.org/Seq")) self.assertEquals(len(items), 6) self.assertEquals(items[-1].concrete(), URIRef("http://example.org/six")) self.assertEquals(items[2].concrete(), URIRef("http://example.org/three")) # just make sure we can serialize self.store.serialize() def test_suite(): return unittest.makeSuite(SeqTestCase) if __name__ == '__main__': unittest.main(defaultTest='test_suite') s = """\ """ rdflib-2.4.2/test/sparql/0000755000175000017500000000000011204354476014212 5ustar nachonachordflib-2.4.2/test/sparql/testSPARQL.py0000755000175000017500000000204211153616026016462 0ustar 
nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:30 $, by $Author: ivan $, $Revision: 1.1 $ # """ """ import sys, os, time, datetime from rdflib.constants import RDFNS as ns_rdf from rdflib.constants import RDFSNS as ns_rdfs #from rdflib.sparql import ns_dc as ns_dc #from rdflib.sparql import ns_owl as ns_owl from rdflib.sparql.sparql import type_integer from rdflib.sparql.sparql import type_double from rdflib.sparql.sparql import type_float from rdflib.sparql.sparql import type_decimal from rdflib.sparql.sparql import type_dateTime from rdflib.Namespace import Namespace ns_foaf = Namespace("http://xmlns.com/foaf/0.1/") ns_ns = Namespace("http://example.org/ns#") ns_book = Namespace("http://example.org/book") ns_person = Namespace("http://example.org/person#") ns_dt = Namespace("http://example.org/datatype#") ns_dc0 = Namespace("http://purl.org/dc/elements/1.0/") ns_dc = Namespace("http://purl.org/dc/elements/1.1/") ns_vcard = Namespace("http://www.w3.org/2001/vcard-rdf/3.0#") rdflib-2.4.2/test/sparql/README0000755000175000017500000000205411153616026015071 0ustar nachonachoThere are two classes of tests (for now): query and construct, each in its own directory. The structure is identical. Each test is in a different python file (usually Test***.py, where *** refer to the chapter in the SPARQL document). The test is run by the queryTest (resp. constructTest) script. This script does a run-time import of the test file, getting the variables in the Test files, construct and runs the sparql query and displays the result. Each test file has an identical structure: it is a bunch of (python) declaration: - the rdf data itself ("rdfData") it is also possible to refer to an array of external files using the variable "datafiles", but the rdfData takes precedence. 
If files are used, either set rdfData to None, or do not set it at all - the 'where' and 'optional' clauses, set as GraphPatterns - the 'select' tuple (if applicable) - the 'construct' pattern (if applicable) - for the query case, the expected result should be added in the form of a string Looking at the current examples, the structure is pretty straightforward. rdflib-2.4.2/test/sparql/QueryTestCase.py0000644000175000017500000000103411153616026017316 0ustar nachonachoimport unittest from rdflib.Graph import Graph class QueryTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testUnicodeString(self): from rdflib.sparql.bison import Parse from cStringIO import StringIO q = \ u""" PREFIX rdf: SELECT ?pred WHERE { rdf:foobar rdf:predicate ?pred. } """ p = Parse(q) if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/sparql/QueryTestCase.pyc0000644000175000017500000000253611164176137017477 0ustar nachonachoÑò ¯Ic@sTddkZddklZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿN(tGrapht QueryTestCasecBs#eZd„Zd„Zd„ZRS(cCsdS(N((tself((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/QueryTestCase.pytsetUpscCsdS(N((R((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/QueryTestCase.pyttearDown scCs6ddkl}ddkl}d}||ƒ}dS(Niÿÿÿÿ(tParse(tStringIOuš PREFIX rdf: SELECT ?pred WHERE { rdf:foobar rdf:predicate ?pred. 
} (trdflib.sparql.bisonRt cStringIOR(RRRtqtp((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/QueryTestCase.pyttestUnicodeString s(t__name__t __module__RRR (((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/QueryTestCase.pyRs  t__main__(tunittestt rdflib.GraphRtTestCaseRR tmain(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/QueryTestCase.pyts  rdflib-2.4.2/test/sparql/ConstructTests/0000755000175000017500000000000011204354476017221 5ustar nachonachordflib-2.4.2/test/sparql/ConstructTests/Test10_21.py0000755000175000017500000000234611153616025021157 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:30:02 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from testSPARQL import ns_vcard from testSPARQL import ns_person from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData = """ Alice """ select = [] pattern = GraphPattern([("?x",ns_foaf["name"],"?name")]) optional = [] construct = GraphPattern([(ns_person["Alice"],ns_vcard["FN"],"?name")]) tripleStore = None rdflib-2.4.2/test/sparql/ConstructTests/Test10_22.py0000755000175000017500000000314111153616025021152 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:30:02 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from testSPARQL import ns_vcard from testSPARQL import ns_person from rdflib.Literal import Literal from rdflib import BNode from rdflib.sparql.sparql import PatternBNode from rdflib.sparql.sparqlOperators import 
lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData = """ Alice Hacker Bob Hacker """ select = [] pattern = GraphPattern([("?x",ns_foaf["givenname"],"?name"),("?x",ns_foaf["family_name"],"?fname")]) optional = [] bnode = BNode("v") #PatternBNode("") construct = GraphPattern([("?x", ns_vcard["N"],bnode),(bnode,ns_vcard["givenName"],"?name"),(bnode,ns_vcard["familyName"],"?fname")]) tripleStore = None rdflib-2.4.2/test/sparql/ConstructTests/Test10_23.py0000755000175000017500000000252111153616025021154 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:30:02 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from testSPARQL import ns_vcard from testSPARQL import ns_person from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData = """ Alice Bob """ select = [] pattern = GraphPattern([("?x",ns_foaf["name"],"?name")]) optional = [] construct = None tripleStore = None rdflib-2.4.2/test/sparql/ConstructTests/constuctTest.py0000755000175000017500000000474411153616025022303 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:30:02 $, by $Author: ivan $, $Revision: 1.1 $ # """ """ import sys, os, time, datetime, imp, sys, StringIO sys.path.insert(0,"../") from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_foaf from testSPARQL import ns_vcard from testSPARQL import ns_person from rdflib.sparql import sparqlGraph from rdflib.FileInputSource import FileInputSource tests = { 1021: "Test10_21", 1022: "Test10_22", 1023: "Test10_23", } Debug = False def run(modName) : # Import 
the python module defs = None (fl,realpath,descr) = imp.find_module(modName,["."]) mod = imp.load_module(modName,fl,realpath,descr) defs = mod.__dict__ ################################################## # Two ways of identifying the RDF data: # 1. A Triple Store generated in the module graph = None try : graph = defs["graph"] except : pass # 2. Directly in the test module as a string rdfData = None try : rdfData = defs["rdfData"] except : pass # Get the final of the triple store... if graph == None : stream = FileInputSource(StringIO.StringIO(rdfData)) graph = sparqlGraph.SPARQLGraph() graph.parse(stream,format="xml") ############################################### # Retrive the query data pattern = defs["pattern"] optPattern = defs["optional"] construct = defs["construct"] ############################################### print "\n============= Test Module: %s =============" % modName results = graph.queryObject(pattern,optPattern) graph = results.construct(construct) graph.serialize("output.rdf") print "=== generated RDF file (output.rdf):\n" for l in file("output.rdf") : sys.stdout.write(l) if __name__ == '__main__' : if len(sys.argv) == 1 : #print "Usage: %s modname1 modname2 ..." 
% sys.argv[0] for mod in tests.values(): run(mod) else : for mod in sys.argv[1:] : if mod.endswith(".py") : run(mod[0:-3]) else : run(mod) rdflib-2.4.2/test/sparql/__init__.pyc0000644000175000017500000000022611164176137016467 0ustar nachonachoÑò ¯Ic@sdS(N((((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql/__init__.pytsrdflib-2.4.2/test/sparql/__init__.py0000644000175000017500000000000211153616026016306 0ustar nachonacho# rdflib-2.4.2/test/sparql/QueryTests/0000755000175000017500000000000011204354476016342 5ustar nachonachordflib-2.4.2/test/sparql/QueryTests/Test6_11.py0000755000175000017500000000313411153616026020221 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData =""" SPARQL Query Language Tutorial Alice SPARQL Protocol Tutorial Bob """ select = ["?title"] patt1 = GraphPattern([("?book",ns_dc0["title"],"?title")]) patt2 = GraphPattern([("?book",ns_dc["title"],"?title")]) pattern = [patt1,patt2] optional = [] tripleStore = None expected = ''' ?title: SPARQL Query Language Tutorial ?title: SPARQL Protocol Tutorial ''' rdflib-2.4.2/test/sparql/QueryTests/Test6_12.py0000755000175000017500000000314411153616026020223 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book 
from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData =""" SPARQL Query Language Tutorial Alice SPARQL Protocol Tutorial Bob """ select = ["?x","?y"] patt1 = GraphPattern([("?book",ns_dc0["title"],"?x")]) patt2 = GraphPattern([("?book",ns_dc["title"],"?y")]) pattern = [patt1,patt2] optional = [] tripleStore = None expected = ''' ?x: SPARQL Query Language Tutorial ?y: None ?x: None ?y: SPARQL Protocol Tutorial ''' rdflib-2.4.2/test/sparql/QueryTests/Test1.py0000755000175000017500000000214711153616026017716 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from rdflib.sparql.graphPattern import GraphPattern # Careful to keep the Johny Lee Outlaw """ select = ["?mbox","?junk"] pattern = GraphPattern([("?x",ns_foaf["name"],"Johny Lee Outlaw"),("?x",ns_foaf["mbox"],"?mbox")]) optional = None tripleStore = None expected = ''' ?mbox: mailto:jlow@example.com ?junk: None ''' rdflib-2.4.2/test/sparql/QueryTests/Test2_5.py0000755000175000017500000000336611153616026020147 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... 
""" from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.graphPattern import GraphPattern rdfData =""" Johny Lee Outlaw Peter Goodguy """ select = ["?name", "?mbox"] pattern = GraphPattern([("?x", ns_foaf["name"],"?name"),("?x",ns_foaf["mbox"],"?mbox")]) optional = [] tripleStore = None expected = ''' ?name: Johny Lee Outlaw ?mbox: mailto:jlow@example.com ?name: Peter Goodguy ?mbox: mailto:peter@example.org ''' rdflib-2.4.2/test/sparql/QueryTests/Test2_6.py0000755000175000017500000000322211153616026020137 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... 
""" rdfData =""" Alice Bob """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.graphPattern import GraphPattern select = ["?x", "?name"] pattern = GraphPattern([("?x", ns_foaf["name"],"?name")]) optional = [] tripleStore = None expected = ''' ?x: (some bnode) ?name: Alice ?x: (some bnode) ?name: Bob ''' rdflib-2.4.2/test/sparql/QueryTests/Test3_2.py0000755000175000017500000000374411153616026020145 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData =""" SPARQL Tutorial 42 The Semantic Web 23 The Semantic Web Old 2000-03-12 The Semantic Web New 2005-03-02 """ select = ["?title", "?price"] pattern = GraphPattern([("?x", ns_dc["title"],"?title"),("?x",ns_ns["price"],"?price")]) pattern.addConstraint(lt("?price",30)) optional = [] tripleStore = None expected = ''' ?title: The Semantic Web ?price: 23 ''' rdflib-2.4.2/test/sparql/QueryTests/Test5_1.py0000755000175000017500000000321211153616026020134 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. 
instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.graphPattern import GraphPattern rdfData =""" Alice Bob """ select = ["?name", "?mbox"] pattern = GraphPattern([("?x", ns_foaf["name"],"?name")]) #optional = None optional = GraphPattern([("?x",ns_foaf["mbox"],"?mbox")]) tripleStore = None expected = ''' ?name: Alice ?mbox: mailto:alice@work.example ?name: Bob ?mbox: None ''' rdflib-2.4.2/test/sparql/QueryTests/Test5_2.py0000755000175000017500000000313311153616026020137 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.sparqlOperators import lt, ge import datetime from rdflib.sparql.graphPattern import GraphPattern thresholdDate = datetime.date(2005,01,01) rdfData =""" SPARQL Tutorial 42 The Semantic Web 23 """ select = ["?title", "?price"] pattern = GraphPattern([("?x", ns_dc["title"],"?title")]) optional = GraphPattern([("?x",ns_ns["price"],"?price")]) optional.addConstraint(lt("?price",30)) tripleStore = None expected = ''' ?title: SPARQL Tutorial ?price: None ?title: The Semantic Web ?price: 23 ''' rdflib-2.4.2/test/sparql/QueryTests/Test5_3.py0000755000175000017500000000353011153616026020141 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. 
Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.graphPattern import GraphPattern rdfData =""" Alice Bob """ select = ["?name", "?mbox", "?hpage"] pattern = GraphPattern([("?x", ns_foaf["name"],"?name")]) #optional = None optional = [ GraphPattern([("?x",ns_foaf["mbox"],"?mbox")]), GraphPattern([("?x",ns_foaf["homepage"],"?hpage")]) ] tripleStore = None expected = ''' ?name: Alice ?mbox: None ?hpage: http://work.example.org ?name: Bob ?mbox: mailto:bob@work.example ?hpage: None ''' rdflib-2.4.2/test/sparql/QueryTests/Test3_1_1.py0000755000175000017500000000313511153616026020356 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... 
""" rdfData =""" 42 abc 2005-02-27 cat """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from rdflib.Literal import Literal import datetime from rdflib.sparql.graphPattern import GraphPattern select = ["?v"] pattern = GraphPattern([("?v","?p",42)]) optional = [] tripleStore = None expected = ''' ?v : (some bnode id) ''' rdflib-2.4.2/test/sparql/QueryTests/Test3_1_2.py0000755000175000017500000000330511153616026020356 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ rdfData =""" 42 abc 2005-02-27 cat """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from rdflib.Literal import Literal import datetime from rdflib.sparql.graphPattern import GraphPattern select = ["?v"] #pattern = GraphPattern([("?v","?p",Literal("abc",datatype="http://example.org/datatype#specialDatatype"))]) pattern = GraphPattern([("?v","?p","abc")]) optional = [] tripleStore = None expected = ''' EMPTY ''' rdflib-2.4.2/test/sparql/QueryTests/Test3_1_3.py0000755000175000017500000000312211153616026020354 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. 
instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns rdfData =""" 42 abc 2005-02-27 cat """ from rdflib.Literal import Literal import datetime from rdflib.sparql.graphPattern import GraphPattern select = ["?v"] pattern = GraphPattern([("?v","?p","cat")]) optional = [] tripleStore = None expected = ''' EMPTY ''' rdflib-2.4.2/test/sparql/QueryTests/Test3_1_4.py0000755000175000017500000000316311153616026020362 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ rdfData =""" 42 abc 2005-02-27 cat """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from rdflib.Literal import Literal import datetime from rdflib.sparql.graphPattern import GraphPattern select = ["?v"] pattern = GraphPattern([("?v","?p",Literal("cat",lang="en"))]) optional = [] tripleStore = None expected = ''' ?v : (some Bnode id) ''' rdflib-2.4.2/test/sparql/QueryTests/Test11_3.py0000755000175000017500000000335711153616026020225 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ Datatype test. Note that this is not 100% kosher. 
The problem is that the Literal of rdflib does not check the datatypes. In theory, if the data contains: x ns:p 42. instead of: x ns:p 42^^http://www.w3.org/2001/XMLSchema#integer the query should return no results, because the first object is of datatype string. However, Literal does not implement this... """ rdfData =""" Alice Bob bob@work.example """ from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_foaf from testSPARQL import ns_ns from testSPARQL import ns_book from rdflib.Literal import Literal from rdflib.sparql.graphPattern import GraphPattern from rdflib.sparql.sparqlOperators import isURI select = ["?name", "?mbox"] pattern = GraphPattern([("?x", ns_foaf["name"],"?name"),("?x",ns_foaf["mbox"],"?mbox")]) pattern.addConstraint(isURI("?mbox")) optional = [] tripleStore = None expected = ''' ?name: Alice ?mbox: mailto:alice@work.example ''' rdflib-2.4.2/test/sparql/QueryTests/queryTest.py0000755000175000017500000000634211153616026020724 0ustar nachonacho#!/d/Bin/Python/python.exe # -*- coding: utf-8 -*- # # # $Date: 2005/04/02 07:29:46 $, by $Author: ivan $, $Revision: 1.1 $ # """ """ import sys, os, time, datetime, imp, sys, StringIO sys.path.insert(0,"../") from rdflib import sparql from rdflib.sparql import sparqlGraph from testSPARQL import ns_rdf from testSPARQL import ns_rdfs from testSPARQL import ns_dc from testSPARQL import ns_dc0 from testSPARQL import ns_foaf from rdflib.FileInputSource import FileInputSource def run(modName) : # Import the python module defs = None (fl,realpath,descr) = imp.find_module(modName,["."]) mod = imp.load_module(modName,fl,realpath,descr) defs = mod.__dict__ ################################################## # Three ways of identifying the RDF data: # 1. A Triple Store generated in the module tripleStore = None try : tripleStore = defs["tripleStore"] except : pass # 2. 
A reference to a set of RDF Files fils = None try : fils = defs["datafiles"] except : pass # 3. Directly in the test module as a string rdfData = None try : rdfData = defs["rdfData"] except : pass # Get the final of the triple store... if tripleStore == None : if rdfData == None : tripleStore = retrieveRDFFiles(fils) else : stream = StringIO.StringIO(rdfData) tripleStore = sparqlGraph.SPARQLGraph() tripleStore.parse(FileInputSource(stream),format="xml") ############################################### # Retrive the query data pattern = defs["pattern"] optPattern = defs["optional"] select = defs["select"] ############################################### print "\n============= Test Module: %s =============" % modName # better test modules describe their expected results... try : expected = defs["expected"] print "expected: %s" % expected print "=======\n" except : pass # Run the query and print the results results = tripleStore.query(select,pattern,optPattern) num = len(results) print "Number of hits: %d" % num print for i in range(0,num) : hit = results[i] if len(select) == 1 : print "%s: %s" % (select[0],hit) else : for j in range(0,len(select)) : var = select[j] val = hit[j] print "%s: %s" % (var,val) print if __name__ == '__main__' : if len(sys.argv) == 1 : print "Usage: %s modname1 modname2 ..." 
% sys.argv[0] else : for mod in sys.argv[1:] : if mod.endswith(".py") : run(mod[0:-3]) else : run(mod) rdflib-2.4.2/test/Sleepycat.py0000644000175000017500000000110711153616026015205 0ustar nachonachofrom test.graph import GraphTestCase try: from rdflib.store.Sleepycat import Sleepycat class SleepycatGraphTestCase(GraphTestCase): store_name = "Sleepycat" except ImportError, e: _logger.warning("Can not test Sleepycat store: %s" % e) from test.context import ContextTestCase try: from rdflib.store.Sleepycat import Sleepycat class SleepycatStoreTestCase(ContextTestCase): store = "Sleepycat" except ImportError, e: _logger.warning("Can not test Sleepycat store: %s" % e) #class Sleepycat(PychinkoTestCase): # backend = 'Sleepycat' rdflib-2.4.2/test/ntriples.pyc0000644000175000017500000002370511164176136015274 0ustar nachonachoÑò ¯Ic@s dZddkZdZdZdedZeidƒZeidƒZeid ƒZeid ƒZ eieƒZ eid ƒZ eieeƒZ d Z eZd efd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZdefd„ƒYZhdd6dd6dd6dd6d d 6Zeid!ƒZeid"ƒZeid#ƒZd$„Zep d%„Zneid&ƒZd'„Zep d(„Znd)efd*„ƒYZ d+„Z!d,„Z"e#d-jo e"ƒndS(.s1 N-Triples Parser License: GPL 2; share and enjoy! Author: Sean B. Palmer, inamidst.com Documentation: http://inamidst.com/proj/rdf/ntriples-doc Command line usage: ./ntriples.py - parses URI as N-Triples ./ntriples.py --help - prints out this help message # @@ fully empty document? 
iÿÿÿÿNs<([^:]+:[^\s"<>]+)>s"([^"\\]*(?:\\.[^"\\]*)*)"s (?:@([a-z]+(?:-[a-z0-9]+)*)|\^\^s)?s([^\r\n]*)(?:\r\n|\r|\n)s[ \t]*s[ \t]+s[ \t]*\.[ \t]*s_:([A-Za-z][A-Za-z0-9]*)itNodecBseZRS((t__name__t __module__(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR stURIcBseZRS((RR(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR"stbNodecBseZRS((RR(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR#stLiteralcBseZddd„ZRS(cCs2t|ƒdt|ƒd|}ti||ƒS(Nt (tstrtunicodet__new__(tclstlittlangtdtypetn((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR %s"N(RRtNoneR (((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR$stSinkcBseZd„Zd„ZRS(cCs d|_dS(Ni(tlength(tself((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyt__init__*scCs|id7_dS(Ni(R(Rtstpto((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyttriple-s(RRRR(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR)s t ParseErrorcBseZRS((RR(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR0ss tts Rs trt"s\s([\x20\x21\x23-\x5B\x5D-\x7E]+)s\\(t|n|r|"|\\)s!\\u([0-9A-F]{4})|\\U([0-9A-F]{8})cCsng}xR|oJti|ƒ}|o-||iƒ}|i|idƒƒq nti|ƒ}|o+|d}|it|idƒƒq nti|ƒ}|op||iƒ}|iƒ\}}t |p|dƒ}|djot d|ƒ‚n|it |ƒƒq |i dƒot d|d ƒ‚q t d |d ƒ‚q Wt d i|ƒƒS( sUnquote an N-Triples string.iiiiÿÿsDisallowed codepoint: %08Xs\sIllegal escape at: %s...i sIllegal literal character: %rit(tr_safetmatchtendtappendtgrouptr_quottquott r_uniquottgroupstintRtunichrt startswithRtjoin(RtresulttmtutUt codepoint((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pytunquote7s2  cCs |idƒS(Nsunicode-escape(tdecode(R((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR/Uss 
([\x80-\xFF])cCstid„|ƒS(NcSsdt|idƒƒS(s%%%02Xi(tordR!(R+((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyt[s(tr_hibytetsub(turi((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyturiquoteZscCs|S(N((R5((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR6]stNTriplesParsercBs†eZdZdd„Zd„Zd„Zd„Zd„Zd„Z d„Z d„Z d „Z d „Z d „Zd „Zd „ZRS(sAn N-Triples Parser. Usage: p = NTriplesParser(sink=MySink()) sink = p.parse(f) # file; use parsestring for a string cCs*|dj o ||_n tƒ|_dS(N(RtsinkR(RR8((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyRgs  cCs£t|dƒptdƒ‚n||_d|_xgto_|iƒ|_|idjoPny|iƒWq5tj otd|iƒ‚q5Xq5W|i S(sParse f as an N-Triples file.treads)Item to parse must be a file-like object.RsInvalid line: %rN( thasattrRtfiletbuffertTruetreadlinetlineRt parselineR8(Rtf((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pytparsels  cCsdt|tƒptdƒ‚nddkl}|ƒ}|i|ƒ|idƒ|i|ƒdS(sParse s as an N-Triples string.s(Item to parse must be a string instance.iÿÿÿÿ(tStringIOiN(t isinstancet basestringRt cStringIORCtwritetseekRB(RRRCRA((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyt parsestring{s   cCs½|ip+|iitƒ}|pdS||_nxtoyti|iƒ}|o$|i|iƒ|_|i dƒS|iitƒ}|pt dƒ‚n|i|7_q8WdS(s+Read an N-Triples line from buffered input.is EOF in lineN( R<R;R9tbufsizRR=tr_lineRRR!R(RR<R+((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR>…s  cCs¯|itƒ|i p|iidƒodS|iƒ}|itƒ|iƒ}|itƒ|iƒ}|itƒ|iot dƒ‚n|i i |||ƒdS(Nt#sTrailing garbage( teattr_wspaceR?R(tsubjectt r_wspacest predicatetobjecttr_tailRR8R(RRORQRR((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR@™s        cCs|ii|ƒS(N(R?R((Rttoken((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pytpeek«scCsG|i|iƒ}|ptd|ƒ‚n|i|iƒ|_|S(NsFailed to eat %s(RR?RR(RtpatternR+((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyRM®s cCs4|iƒp |iƒ}|ptdƒ‚n|S(Ns Subject must be uriref or nodeID(turireftnodeidR(Rtsubj((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyROµscCs'|iƒ}|ptdƒ‚n|S(NsPredicate 
must be uriref(RWR(Rtpred((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyRQ¼s cCsA|iƒp|iƒp |iƒ}|ptdƒ‚n|S(NsUnrecognised object type(RWRXtliteralR(Rtobjt((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyRRÂs&cCsO|idƒo;|itƒidƒ}t|ƒ}t|ƒ}t|ƒStS(NtR@RURMRORQRRRWRXR[(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyR7`s           cCsMddk}tƒ}|i|ƒ}|i|ƒ}|iƒdG|iGHdS(NiÿÿÿÿsLength of input:(turllibR7turlopenRBtcloseR(R5RdtparserR,R8((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pytparseURIÞs    cCs@ddk}t|iƒdjot|idƒntGHdS(Niÿÿÿÿii(tsystlentargvRhRc(Ri((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pytmainès t__main__($RctreRWR[tlitinfotcompileRKRNRPRSR^RaRbRJR_tvalidateRRRRRRRRt ExceptionRR#RR"R$R/R3R6R7RhRlR(((s8/Users/eikeon/rdflib-svn/branches/2.4.x/test/ntriples.pyt sF )    ~  rdflib-2.4.2/test/leaves.py0000644000175000017500000000154611153616026014542 0ustar nachonachoimport unittest import doctest data = """ @prefix foaf: . @prefix : . :a foaf:knows :b . :a foaf:knows :c . :a foaf:knows :d . :b foaf:knows :a . :b foaf:knows :c . :c foaf:knows :a . """ query = """ PREFIX foaf: select distinct ?person where { ?person foaf:knows ?a . ?person foaf:knows ?b . filter (?a != ?b) . } """ #g = CG() from StringIO import StringIO #g.parse(StringIO(data), format='n3') #print g.query(q).serialize('json') def test_leaves(): return DocFileSuite("leaves.txt", package="rdflib", optionflags = doctest.ELLIPSIS, globs=locals()) if __name__ == "__main__": doctest.testfile("leaves.txt", globs=globals(), optionflags = doctest.ELLIPSIS) rdflib-2.4.2/test/comparison_graph0000644000175000017500000000742511153616026016171 0ustar nachonacho#!/usr/bin/python """ This is not an automated test program-- it generates a viewable graph that shows the clusters of equal objects. 
""" import os, datetime from rdflib import Literal, URIRef, BNode, Namespace import yapgvb import rdflib XSD = Namespace("http://www.w3.org/2001/XMLSchema#") EX = Namespace("http://example.com/") pyValues = '''\ u"foo" "foo" Literal("foo") Literal("foo", datatype=XSD['string']) Literal("foo", datatype=EX['bar']) Literal("foo", "fr") URIRef("foo") BNode("foo") Literal("5") Literal("5", datatype=XSD['decimal']) Literal("5", datatype=XSD['string']) 5 5.0 "5" 4.99999999999999999 Literal("4.99999999999999999") Literal("4.99999999999999999", datatype=XSD['decimal']) False "False" "false" Literal("False") Literal("false", datatype=XSD['boolean']) Literal(False) Literal(False, datatype=XSD['boolean']) Literal(0, datatype=XSD['boolean']) Literal("http://example.com/foo/") URIRef("http://example.com/foo/") URIRef("http://example.com/foo%2F") "2006-04-15" Literal("2006-04-15") Literal("2006-04-15", datatype=XSD['date']) Literal("2006-04-15Z", datatype=XSD['date']) Literal("2006-04-15-00:00", datatype=XSD['date']) datetime.date(2006, 4, 15) '''.strip().splitlines() # http://www.w3.org/TR/xmlschema-2/#date says these are the same pyValues.extend([ 'Literal("2002-10-10+13:00", datatype=XSD["date"])', 'Literal("2002-10-09-11:00", datatype=XSD["date"])' ]) # see also http://www.w3.org/2001/sw/DataAccess/tests/#date-1 import cgitb cgitb.enable(format='txt') gv = yapgvb.Graph('rdflib comparisons', strict=True) #gv.defaultdist = 100 #gv.mindist = 500 #gv.nodesep = 2 #gv.size = [8, 8] #gv.pack = "false" gv.sep = .5 #gv.overlap = "false" gv.add_node("using rdflib version %s" % rdflib.__version__, color="green") nodes = [(py, eval(py)) for py in pyValues] gvNode = {} for py, val in nodes: label = py label = label.replace('99999999999999999', '99...') label = label.replace('datatype=', 'dt=') n = gvNode[py] = gv.add_node(label) n.shape = 'box' n.fontsize = 10 n.margin = .0005, .00002 for i, (yPy, yVal) in enumerate(nodes): for j, (xPy, xVal) in enumerate(nodes): true = xVal == yVal if 
not true and not (xVal != yVal): # neither equal nor nonequal! edge = gv.add_edge(gvNode[xPy], gvNode[yPy]) edge.label = "__ne__ failed" edge.color = "red" if true: edge = gv.add_edge(gvNode[xPy], gvNode[yPy]) edge.label = "=" edge.style = "bold" edge.len = 2.5 if xPy == yPy: # try to shorten the self-loops edge.headport = 'w' edge.tailport = 'w' edge.weight = .2 edge.len = .2 gv.layout(yapgvb.engines.circo) out = "/tmp/comparison_graph.png" gv.render(out) print "wrote: %s" % out raise SystemExit # this part makes an html table, but the graph is much prettier from nevow import flat, tags as T rows = [T.tr[T.th, [T.th[i] for i in range(len(nodes))]]] for i, (yPy, yVal) in enumerate(nodes): cols = [T.th["%s) " % i, yPy]] for j, (xPy, xVal) in enumerate(nodes): true = xVal == yVal symbol = T.xml("≠") if true: symbol = "=" cols.append(T.td(class_=str(true))[symbol]) rows.append(T.tr[cols]) print flat.flatten( T.html[ T.head[ T.style[''' table { border: 1px solid black; border-collapse: collapse; } td, th { border: 1px solid black; } td { width: 1em; text-align: center; } th { text-align: left; } .True { background: #92E892; } .False { color: gray; } '''] ], T.body[ T.h1["Comparisons using rdflib %s" % rdflib.__version__], T.table[rows]]]) rdflib-2.4.2/test/test_not_equals.py0000644000175000017500000000173311153616026016472 0ustar nachonachofrom rdflib.Namespace import Namespace from rdflib import plugin,RDF,RDFS,URIRef, StringInputSource, Literal from rdflib.Graph import Graph,ReadOnlyGraphAggregate,ConjunctiveGraph import sys from pprint import pprint def testSPARQLNotEquals(): NS = u"http://example.org/" graph = ConjunctiveGraph() graph.parse(StringInputSource(""" @prefix : . @prefix rdf: <%s> . :foo rdf:value 1. :bar rdf:value 2."""%RDF.RDFNS), format="n3") rt = graph.query("""SELECT ?node WHERE { ?node rdf:value ?val. 
FILTER (?val != 1) }""", initNs={'rdf':RDF.RDFNS}, DEBUG=False) for row in rt: item = row[0] assert item == URIRef("http://example.org/bar") if __name__ == '__main__': testSPARQLNotEquals() rdflib-2.4.2/test/util.py0000644000175000017500000000071311153616026014233 0ustar nachonachoimport unittest from rdflib import Literal from rdflib.store.NodePickler import NodePickler class UtilTestCase(unittest.TestCase): def test_to_bits_from_bits_round_trip(self): np = NodePickler() a = Literal(u'''A test with a \\n (backslash n), "\u00a9" , and newline \n and a second line. ''') b = np.loads(np.dumps(a)) self.assertEquals(a, b) if __name__ == '__main__': unittest.main(defaultTest='test_suite') rdflib-2.4.2/test/test_issue_45.pyc0000644000175000017500000000541111164177226016126 0ustar nachonachoÑò ¯Ic@sšddkZddklZddklZddklZddk l Z dei fd„ƒYZ de fd „ƒYZ e d joeiƒndS( iÿÿÿÿN(tConjunctiveGraph(t Namespace(tAlgebra(tStringIOt TestSparqlASKcBs,eZd„Zd„Zd„Zd„ZRS(cCsHtƒ|_tdƒ}|ii|ddƒtit|_t_dS(NsŽ @prefix rdfs: . @prefix : . :Foo a rdfs:Class . :bar a :Foo . tformattn3(tGraphtgraphRtloadRtDAWG_DATASET_COMPLIANCEtFalsetcompliance_setting(tselftio((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pytsetUp s   cCs|it_dS(N(R RR (R ((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyttearDownscCs/|iidƒ}|i|itgdƒdS(sQ Ask for a triple that exists, assert that the response is True. sZASK { a } s5The answer should have been that the triple was foundN(Rtqueryt assertEqualst askAnswertTrue(R tres((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyt test_ask_truescCs/|iidƒ}|i|itgdƒdS(sZ Ask for a triple that does not exist, assert that the response is False. 
sZASK { a } s9The answer should have been that the triple was not foundN(RRRRR (R R((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyttest_ask_false&s(t__name__t __module__RRRR(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyR s   tTestSparqlASKWithCompliancecBseZd„ZRS(cCsti|ƒtt_dS(N(RRRRR (R ((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyR.s (RRR(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyR-st__main__(tunittestt rdflib.GraphRRtrdflib.NamespaceRtNSt rdflib.sparqlRRtTestCaseRRRtmain(((s=/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_issue_45.pyts # rdflib-2.4.2/test/store_performace.py0000644000175000017500000000641711153616026016624 0ustar nachonachoimport unittest from rdflib.Graph import Graph from rdflib import URIRef import gc import itertools from time import time from random import random from tempfile import mkdtemp def random_uri(): return URIRef("%s" % random()) class StoreTestCase(unittest.TestCase): """ Test case for testing store performance... probably should be something other than a unit test... but for now we'll add it as a unit test. """ store = 'default' def setUp(self): self.gcold = gc.isenabled() gc.collect() gc.disable() self.graph = Graph(store=self.store) if self.store == "MySQL": from test.mysql import configString from rdflib.store.MySQL import MySQL path=configString MySQL().destroy(path) else: path = a_tmp_dir = mkdtemp() self.graph.open(path, create=True) self.input = input = Graph() input.parse("http://eikeon.com") def tearDown(self): self.graph.close() if self.gcold: gc.enable() # TODO: delete a_tmp_dir del self.graph def testTime(self): number = 1 print self.store print "input:", for i in itertools.repeat(None, number): self._testInput() print "random:", for i in itertools.repeat(None, number): self._testRandom() print "." 
def _testRandom(self): number = len(self.input) store = self.graph def add_random(): s = random_uri() p = random_uri() o = random_uri() store.add((s, p, o)) it = itertools.repeat(None, number) t0 = time() for _i in it: add_random() t1 = time() print "%.3g" % (t1 - t0), def _testInput(self): number = 1 store = self.graph def add_from_input(): for t in self.input: store.add(t) it = itertools.repeat(None, number) t0 = time() for _i in it: add_from_input() t1 = time() print "%.3g" % (t1 - t0), class MemoryStoreTestCase(StoreTestCase): store = "Memory" try: from rdflib.store.Sleepycat import Sleepycat class SleepycatStoreTestCase(StoreTestCase): store = "Sleepycat" except ImportError, e: print "Can not test Sleepycat store:", e try: import persistent # If we can import persistent then test ZODB store class ZODBStoreTestCase(StoreTestCase): non_standard_dep = True store = "ZODB" except ImportError, e: print "Can not test ZODB store:", e try: import RDF # If we can import RDF then test Redland store class RedLandTestCase(StoreTestCase): non_standard_dep = True store = "Redland" except ImportError, e: print "Can not test Redland store:", e # TODO: add test case for 4Suite backends? from Ft import Rdf try: # import todo # what kind of configuration string does open need? import MySQLdb,sha,sys # If we can import RDF then test Redland store class MySQLTestCase(StoreTestCase): non_standard_dep = True store = "MySQL" except ImportError, e: print "Can not test MySQL store:", e if __name__ == '__main__': unittest.main() rdflib-2.4.2/test/test_sparql_graph_graph_pattern.pyc0000644000175000017500000000371211164772154022072 0ustar nachonachoÑò kôÓIc@s’ddklZddklZlZddklZddkZdZedƒZdZ dei fd „ƒYZ e d joei ƒndS( iÿÿÿÿ(tConjunctiveGraph(tURIReftLiteral(tStringIONs @prefix foaf: . foaf:name "Chime"; a foaf:Person. foaf:knows ,. foaf:name "Ivan".shttp://eikeon.com/uÑ PREFIX foaf: SELECT ?X WHERE { ?P a foaf:Person . ?X foaf:knows ?P . OPTIONAL { ?X foaf:knows ?OP . 
?OP foaf:name "Judas" } FILTER (!bound(?OP)) }tTestSparqlOPT_FILTER2cBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtloadRt testContent(tself((sO/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_graph_graph_pattern.pytsetUps cCs`|iitdtƒiddƒ}t|ƒ}|i|tgjdtgt|ƒfƒdS(NtDEBUGRtpythonsexpecting : %s . Got: %s( RtquerytQUERYtFalset serializetlistt failUnlesstdoc1trepr(R tresults((sO/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_graph_graph_pattern.pyttest_OPT_FILTER s   (t__name__t __module__R R(((sO/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_graph_graph_pattern.pyRs t__main__(t rdflib.GraphRtrdflibRRRtunittestR RRtTestCaseRRtmain(((sO/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_graph_graph_pattern.pyts   rdflib-2.4.2/test/test_sparql_graph_graph_pattern.py~0000644000175000017500000000230411153616026022112 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef, Literal, RDFS from StringIO import StringIO import unittest testContent = """ @prefix foaf: . foaf:name "Chime"; a foaf:Person. foaf:knows ,. foaf:name "Ivan".""" doc1 = URIRef("http://eikeon.com/") QUERY = u""" PREFIX foaf: SELECT ?X WHERE { ?P a foaf:Person . ?X foaf:knows ?P . OPTIONAL { ?X foaf:knows ?OP . ?OP foaf:name "Judas" } FILTER (!bound(?OP)) }""" class TestSparqlOPT_FILTER2(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.load(StringIO(testContent), format='n3') def test_OPT_FILTER(self): results = self.graph.query(QUERY, DEBUG=False).serialize(format='python') results = list(results) self.failUnless( results == [doc1], "expecting : %s . 
Got: %s"%([doc1],repr(results))) if __name__ == "__main__": unittest.main()rdflib-2.4.2/test/context.pyc0000644000175000017500000002672511164176136015125 0ustar nachonachoÑò ¯Ic@sBddkZddklZddkTddklZdeifd„ƒYZy&ddkZdefd„ƒYZ Wne j oZ d e GHnXy&ddk Z d efd „ƒYZ Wne j oZ d e nXy&ddkZd efd„ƒYZWne j oZ de GHnXedjoeiƒndS(iÿÿÿÿN(tmkdtemp(t*(tGraphtContextTestCasecBs¤eZdZeZd„Zd„Zd„Zd„Zd„Z d„Z d„Z d„Z d „Z d „Zd „Zd „Zd „Zd„Zd„Zd„ZRS(tdefaultcCstd|iƒ|_|idjo:ddkl}ddkl}|}|ƒi|ƒntƒ}}|ii |dt ƒt dƒ|_ t dƒ|_ t d ƒ|_t d ƒ|_t d ƒ|_t d ƒ|_t d ƒ|_t dƒ|_t dƒ|_|iidƒdS(NtstoretMySQLiÿÿÿÿ(t configString(Rtcreateumichelutarekubobulikesuhatesupizzaucheeseu context-1u context-2(NNN(tConjunctiveGraphRtgraphtmysqlRtrdflib.store.MySQLRtdestroyRtopentTruetURIReftmichelttarektbobtlikesthatestpizzatcheesetc1tc2tremovetNone(tselfRRtpatht a_tmp_dir((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pytsetUp s$ cCs|iiƒdS(N(R tclose(R((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttearDown$scCsOt|tƒp t|tƒptt|ƒ‚td|iid|d|ƒS(NRt identifiertnamespace_manager(t isinstanceRtBNodetAssertionErrorttypeRR R(RR"((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt get_context's c Csû|i}|i}|i}|i}|i}|i}|i}|i}t|i i |ƒ} | i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒdS(N( RRRRRRRRRR Rtadd( RRRRRRRRRR ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pytaddStuff,s         c Csû|i}|i}|i}|i}|i}|i}|i}|i}t|i i |ƒ} | i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒ| i |||fƒdS(N( RRRRRRRRRR RR( RRRRRRRRRR ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt removeStuff?s         cCs‚|i}|i}|i|i|if}|ii|ƒt|ii|ƒ}|i|ƒt|ii|ƒ}|i|ƒdS(N( RRRRRR R)RR(RRRttripleR ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pytaddStuffInMultipleContextsRs   cCsj|iƒ|i|i|if}t|ii|iƒ}|i|ƒ|it |iƒt |ƒƒdS(N( R-RRRR RRR)t assertEqualstlen(RR,R ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestConjunction`s   
cCs|iƒdS(N(R*(R((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestAddhscCs|iƒ|iƒdS(N(R*R+(R((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt testRemoveks cCs|i}|ii|i|ƒƒt|ii|ƒ}t|iƒ}x6tddƒD]%}|it ƒ|i |i fƒqVW|i t|ƒ|dƒ|i t|i|ƒƒ|dƒ|ii|i|ƒƒ|i t|iƒ|ƒ|i t|ƒdƒdS(Nii ( RR tremove_contextR(RRR/trangeR)R%RR.(RRR toldLenti((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestLenInOneContextos ##cCslt|iƒ}|iƒ|it|iƒ|dƒt|ii|iƒ}|it|ƒ|dƒdS(Ni(R/R R-R.RRR(RR5R ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestLenInMultipleContextss  cCs|i}|i}|i|i|if}|iƒ|i||ijƒt|ii |ƒ}|i |ƒ|i||ijƒt|ii |ƒ}|i |ƒ|i||ijƒ|ii |ƒ|i||ijƒ|iƒ|ii |ƒ|i||ijƒdS(N( RRRRRR-tassert_R RRR(RRRR,R ((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestRemoveInMultipleContextsŠs       cCsÌ|i|i|if}|iƒd„}|i|it||iiƒƒjƒ|i|i t||iiƒƒjƒt|t |ii|ƒƒƒ}|i|i|jƒ|i|i |jƒdS(NcSs|iS(N(R"(tc((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pytcid¦s( RRRR-R9RtmapR tcontextsRtlist(RR,R<t contextList((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt testContexts¢s  ((!cCs“|i}|iƒ|itt|ii|ƒƒdƒ|it|i|ƒƒdƒ|ii|i|ƒƒ|i |i|ii ƒjƒdS(Ni( RR-R.R/RR RR(R3R9R>(RR((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyttestRemoveContext¯s   %cCsFd}|iƒ|ii|||fƒ|it|iƒdƒdS(Ni(RR-R RR.R/(RtAny((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt testRemoveAny¹s c CsÈ |i}|i}|i}|i}|i}|i}|i}|i}|i} |i i } |i } t |i i |ƒ} | i } d}|iƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| 
|||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒxC| |i|ƒgD],}| t|i||ƒƒt||fƒƒ| t|i||ƒƒt|fƒƒ| t|i||ƒƒt|||gƒƒ| t|i||ƒƒtƒƒ| t|i||ƒƒt||gƒƒ| t|i||ƒƒt||gƒƒ| t|i||ƒƒt||gƒƒ| t|i||ƒƒt|gƒƒ| t|i||ƒƒt|gƒƒ| t|i||ƒƒt|gƒƒ| t|i||ƒƒt|gƒƒ| t|i||ƒƒt|gƒƒ| t|i|ƒƒt||f||fgƒƒ| t|i|ƒƒt||f||f||f||f||fgƒƒ| t|i|ƒƒt||f||fgƒƒ| t|i|ƒƒt||f||f||fgƒƒ| t|i|ƒƒt||f||fgƒƒ| t|i|ƒƒt||f||f||fgƒƒ| t|i|ƒƒt||f||f||fgƒƒ| t|i|ƒƒt||fgƒƒ| t|ƒt|||f|||f|||f|||f|||f|||f|||fgƒƒq:W|iƒ| tt| |||fƒƒƒdƒ| tt| |||fƒƒƒdƒdS(Niiiiii(RRRRRRRRR.R ttriplesRRRR*R/R?R(tsettsubjectstobjectst predicatestsubject_objectstpredicate_objectstsubject_predicatesR+(RRRRRRRRRtasserteRER tc1grapht c1triplesRCR;((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyt testTriples¿s¦             ((((((((((((((((((((((((((((((((((((((((((+(."+++(((((4O4=4==+q ((t__name__t __module__RRtslowtestRR!R(R*R+R-R0R1R2R7R8R:RARBRDRP(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyRs$            tZODBContextTestCasecBseZdZeZRS(tZODB(RQRRRtFalseRS(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyRTCssCan not test ZODB store: %stMySQLContextTestCasecBseZdZeZRS(R(RQRRRRVRS(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyRWLssCan not test MySQL store: %stRedlandContextTestCasecBseZdZeZRS(tRedland(RQRRRRVRS(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyRXUssCan not test Redland store: %st__main__(tunittestttempfileRtrdflibt rdflib.GraphRtTestCaseRt persistentRTt ImportErrortetMySQLdbRWtRDFRXRQtmain(((s7/Users/eikeon/rdflib-svn/branches/2.4.x/test/context.pyts,  ÿ:     rdflib-2.4.2/test/test_empty_xml_base.py0000644000175000017500000000366611153616026017337 0ustar nachonacho""" Test for empty xml:base values xml:base='' should resolve to the given publicID per XML Base specification and RDF/XML dependence on it """ from rdflib import ConjunctiveGraph, Literal, URIRef, Namespace, RDF from StringIO import StringIO import unittest FOAF = Namespace('http://xmlns.com/foaf/0.1/') test_data = """ """ 
test_data2 = """ """ baseUri = URIRef('http://example.com/') baseUri2 = URIRef('http://example.com/foo/bar') class TestEmptyBase(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.parse(StringIO(test_data),publicID=baseUri) def test_base_ref(self): self.failUnless(len(self.graph) == 1,"There should be at least one statement in the graph") self.failUnless((baseUri,RDF.type,FOAF.Document) in self.graph,"There should be a triple with %s as the subject" % baseUri) class TestRelativeBase(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.parse(StringIO(test_data2),publicID=baseUri2) def test_base_ref(self): self.failUnless(len(self.graph) == 1,"There should be at least one statement in the graph") resolvedBase = URIRef('http://example.com/baz') self.failUnless((resolvedBase,RDF.type,FOAF.Document) in self.graph,"There should be a triple with %s as the subject" % resolvedBase) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/test_bdb_transaction.pyc0000644000175000017500000002273511164771240017626 0ustar nachonachoÑò O~ÑIc @s<ddkZddkZddklZddklZddklZlZl Z ddk l Z l Z ddk l Z ddklZd„Zgd „Zgd „Zd efd „ƒYZd efd„ƒYZdeifd„ƒYZedjo;eiƒZeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiƒZeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeiedƒƒeied ƒƒeied!ƒƒeiedƒƒeiƒZeiedƒƒeiedƒƒeied!ƒƒeied"ƒƒeied#ƒƒeied$ƒƒeid%d&ƒieƒeid%d&ƒieƒeid%d&ƒieƒndS('iÿÿÿÿN(tContextTestCase(t GraphTestCase(tURIReftLiteraltConjunctiveGraph(tThreadt currentThread(trandom(tmkdtempcCstttƒƒƒS(N(RtstrR(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt random_uri sc CsÁtiƒ}x‰td|ƒD]x}yKtƒ}tƒ}tƒ}|i|||fƒ|i|||fƒWqtj o} dG| GH| ‚qXqWd|tiƒ|tƒiƒfGHdS(Niscould not perform ops&%s triples, add time: %.4f, thread: %s(ttimetrangeR taddtappendt ExceptionRtgetName( t performed_opstgraphtnum_opstinputtt1titstptote((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt worker_add s     c 
CsØtiƒ}x£td|ƒD]’}yny|iƒ\}}}Wn"tƒ}tƒ}tƒ}nX|i|||fƒ|i|||fƒWqtj o} | ‚qXqWdtiƒ|tƒiƒfGHdS(Nisremove time: %.4f, thread: %s( R R tpopR tremoveRRRR( RRRRRRRRRR((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt worker_removes    t TestBDBGraphcBseZdZRS(t BerkeleyDB(t__name__t __module__t store_name(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyR2stTestBDBContextcBseZdZRS(R (R!R"tstore(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyR$5stTestBDBTransactionscBszeZeZd„Zd„Zd„Zddgd„Zd„Zd„Z d„Z d „Z d „Z d „Z d „ZRS( cCs>tddƒ|_tdƒ|_|ii|idtƒdS(NR%R itcreate(RRRtpathtopentTrue(tself((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pytsetUp<scCs|iiƒdS(N(Rtclose(R+((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyttearDownAscCsOt|tƒp t|tƒptt|ƒ‚td|iid|d|ƒS(NR%t identifiertnamespace_manager(t isinstanceRtBNodetAssertionErrorttypetGraphRR%(R+R/((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt get_contextDs i ièc CsŠg}g}x\td|ƒD]K}td|d||i|fdh|d6ƒ}|i|ƒ|iƒqWx|D]}|iƒqrW|S(NittargettargstkwargsR(R RRRtstarttjoin( R+tworkertworkersttriplesRtall_opstpoolRtt((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt__manyOpsManyThreadsJs. 
cCsId}d}|itd|d|ƒ|it|iƒ||jƒdS(NiièR=R>(t(_TestBDBTransactions__manyOpsManyThreadsRt failUnlesstlenR(R+twRA((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyttestAddManyManyThreadsXscCsZ|itddddƒ}|itddddd|ƒ|it|iƒdjƒdS(NR=iR>i iRi(RCRRRDRER(R+tops((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt testRemove`scCsm|itddddƒ}y|iidƒWntj o}|‚nX|it|iƒdjƒdS(NR=iR>i i(NNN(RCRRRtNoneRRDRE(R+RHR((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt testRemoveAllfs  cs~d‰‡‡fd†}‡fd†}td|ƒ}td|ƒ}|iƒtidƒ|iƒ|iƒ|iƒdS(Niècs<ytgˆiˆƒWntj o}dG|GHnXdS(Ns got exc: (RRR(R(R+R>(sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt_worker_transactionuscs†ˆiiiƒyAg}ˆiidƒD] }||q*~}ˆiiiƒWn.tj o"}dG|GHˆiiiƒnXdS(Ns got exc: (NNN(RR%t begin_txnR>RJtcommitRtrollback(t_[1]trtresR(R+(sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt_read~s- R7gš™™™™™¹?(RR:R tsleepR;(R+RLRStadd_ttread_t((R>R+sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt testReadWriters      c s£d}d‰‡‡fd†}g}x=td|ƒD],}td|ƒ}|i|ƒ|iƒq4Wx|D]}|iƒqkWˆitˆiƒ|ˆjƒdS(NiiÐc stiƒ}t}d}xÏ|pLjiiiƒ}yXt}xKtdˆƒD]:}tƒ}tƒ}tƒ}ˆii|||fƒqMWWn@tj o4} ˆiii ƒti d|ƒ|d>}qXˆiii ƒt }qWdˆtiƒ|t ƒiƒfGHdS(Niigš™™™™™¹?s%%s triples add time: %.4f, thread: %s(R tFalseRR%RMR R R RRORTRNR*RR( RtsuccesstdelayttxntretryRRRRR(R+R>(sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt_worker–s*    ! 
iR7(R RRR:R;RDRER(R+R=R]R@RRA((R>R+sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyttestAddUserTransaction’s csˆd‰‡‡fd†}‡fd†}td|ƒ}td|ƒ}|iƒtidƒ|iƒ|iƒdGH|iƒdGHdS(Niècslˆiiiƒy'tgˆiˆƒˆiiiƒWn.tj o"}dG|GHˆiiiƒnXdS(Ns got exc: (RR%RMRRNRRO(R(R+R>(sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyRLÁs csˆiiidtƒdS(Ntcommit_pending_transaction(RR%R-R*((R+(sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt_closeÊsR7gà?s add finishedsclose finished(RR:R RTR;(R+RLR`RUtclose_t((R>R+sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyttestCloseCommit¾s      cCs0|iiiƒ|iii|idtƒdS(NR'(RR%R-R)R(RX(R+((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyt testCloseOpenÞs(R!R"R*tslowtestR,R.R6RCRGRIRKRWR^RbRc(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pyR&8s      , t__main__RGR^RIRKRbRcRWttestAddttestLenInOneContextttestLenInMultipleContextsttestConjunctionttestRemoveInMultipleContextst testContextsttestRemoveContextt testRemoveAnyt testTriplesttestStatementNodettestGraphValuet testConnectedt verbosityi(tunittestR tcontextRRRtrdflibRRRt threadingRRRttempfileRR RRRR$tTestCaseR&R!t TestSuitet bdb_suitetaddTestt context_suitet graph_suitetTextTestRunnertrun(((sD/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_bdb_transaction.pytsV   «    rdflib-2.4.2/test/test_literal.pyc0000644000175000017500000000234211164177226016122 0ustar nachonachoÑò ¯Ic@sfddkZddkZddklZlZdeifd„ƒYZedjoeiƒndS(iÿÿÿÿN(tURIReftLiteraltTestRelativeBasecBs#eZd„Zd„Zd„ZRS(cCsdS(N((tself((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/test_literal.pytsetUpscCs2tdƒ}tt|ƒƒ}|i||ƒdS(Nt'(Rtevaltreprt assertEquals(Rtatb((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/test_literal.pyttest_repr_apostrophe s cCs2tdƒ}tt|ƒƒ}|i||ƒdS(Nt"(RRRR(RR R ((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/test_literal.pyttest_repr_quotes (t__name__t __module__RR R 
(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/test_literal.pyRs  t__main__(tunittesttrdflibRRtTestCaseRRtmain(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/test_literal.pyts   rdflib-2.4.2/test/sparql_empty_prefix.py0000644000175000017500000000141311153616026017351 0ustar nachonachofrom rdflib import ConjunctiveGraph from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . _:a foaf:name "Alice" . """ test_query = """PREFIX : SELECT ?name WHERE { ?x :name ?name . }""" correct = '"name" : {"type": "literal", "xml:lang" : "None", "value" : "Alice"}' class Query(unittest.TestCase): def testQueryPlus(self): graph = ConjunctiveGraph() graph.parse(StringIO(test_data), format="n3") result_json = graph.query(test_query).serialize(format='json') self.failUnless(result_json.find(correct) > 0) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/rdfa.pyc0000644000175000017500000000766611164176136014360 0ustar nachonachoÑò ¯Ic@sddkZddkZddkZddkZddkZddkZddklZddklZ ddkl Z ddkl Z ddkl Z ddkl Z d„Zd „Zd „Zee_d eifd „ƒYZed jo eƒndS(iÿÿÿÿN(tGraph(tConjunctiveGraph(tStringInputSource(tURIRef(tBNode(tLiteralcCsUtiƒ}xtƒD]}|i|ƒqWdGHtiddddƒi|ƒdS(Ns ------ RDFa Parser Tests ----- t verbosityit descriptionsi(tunittestt TestSuitet make_casestaddTesttTextTestRunnertrun(tsuitettest((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pytmains   ccs¬d}t}g}ti|ƒD]>}tii|ƒddjo|tii|ƒdq q ~}|iƒx4|D],}ttiitii||ƒƒƒVqxWdS(Ns test/rdfais.htmi( tFalsetostlistdirtpathtsplitexttsortt RDFaTestStubtabspathtjoin(ttestdirtverboset_[1]tftteststtestname((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyR sA ccs#xtƒD]}|ifVq WdS(N(R trunTest(R((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyt all_tests's RcBs,eZd„Zd„Zd„Zd„ZRS(cCs&tii|ƒ||_d|_dS(Nshttp://example.com/(RtTestCaset__init__ttestbasetpubId(tselfR$((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyR#.s 
cCsttii|iƒƒS(N(tstrRRtbasenameR$(R&((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pytshortDescription3scCsôt|tƒoB|iƒ}|dd!djo|d}ntit|ƒƒSt|tƒo>tt|ƒƒdjoti|i ƒStit|ƒƒSt|t ƒo4ti t|ƒd|i pdd|i pdƒStdƒ‚dS(Niis_:_:itlangtdtypesunexpected node value(t isinstanceRtn3tntriplestbNodeR'RtlentURIR%RtlanguagetNonetdatatypet Exception(R&tnodetbid((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyt nodeToString6s "c Cs{|id}|id}|itii|ƒ dƒtƒ}|i|d|iddƒ|iddƒ}t ƒ}xB|D]:\}}}|i i t t |i|||fƒƒƒq~Wtƒ} | i|d|iddƒ| iddƒ} t ƒ} xB| D]:\}}}| i i t t |i|||fƒƒƒqW|it|ƒt| ƒj d |iƒ|| fƒdS( Ns.htms.ttlsmissing expected results file.tpublicIDtformatR-tnttrdfas#In %s: results do not match. %s %s(R$tfailIfRRtisfiletRGraphtloadR%t serializeRttriplestaddttupletmapR8thashR)( R&ttestfiletresultsftstore1t pcontentstpgtatbtctstore2t qcontentstqg((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyR Fs&    /  /(t__name__t __module__R#R)R8R (((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyR,s   t__main__(RtsyststringtrdfdiffRR.RtrdflibRR?RRRRRR R!RtunstableR"RRR(((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/rdfa.pyts $      2 rdflib-2.4.2/test/n3_2.py0000644000175000017500000000631011153616026014016 0ustar nachonachofrom rdflib import URIRef, BNode, Literal, Variable from rdflib import RDF, RDFS from rdflib import StringInputSource from rdflib.Graph import QuotedGraph,ConjunctiveGraph import sys from pprint import pprint implies = URIRef("http://www.w3.org/2000/10/swap/log#implies") testN3=""" @prefix rdf: . @prefix rdfs: . @prefix : . {:a :b :c;a :foo} => {:a :d :c,?y}. _:foo a rdfs:Class. 
:a :d :c.""" #Thorough test suite for formula-aware store def testN3Store(store="default", configString=None): g = ConjunctiveGraph(store=store) if configString: g.destroy(configString) g.open(configString) g.parse(StringInputSource(testN3), format="n3") print g.store try: for s,p,o in g.triples((None,implies,None)): formulaA = s formulaB = o assert type(formulaA)==QuotedGraph and type(formulaB)==QuotedGraph a = URIRef('http://test/a') b = URIRef('http://test/b') c = URIRef('http://test/c') d = URIRef('http://test/d') v = Variable('y') universe = ConjunctiveGraph(g.store) #test formula as terms assert len(list(universe.triples((formulaA,implies,formulaB))))==1 #test variable as term and variable roundtrip assert len(list(formulaB.triples((None,None,v))))==1 for s,p,o in formulaB.triples((None,d,None)): if o != c: assert isinstance(o,Variable) assert o == v s = list(universe.subjects(RDF.type, RDFS.Class))[0] assert isinstance(s,BNode) assert len(list(universe.triples((None,implies,None)))) == 1 assert len(list(universe.triples((None,RDF.type,None)))) ==1 assert len(list(formulaA.triples((None,RDF.type,None))))==1 assert len(list(formulaA.triples((None,None,None))))==2 assert len(list(formulaB.triples((None,None,None))))==2 assert len(list(universe.triples((None,None,None))))==3 assert len(list(formulaB.triples((None,URIRef('http://test/d'),None))))==2 assert len(list(universe.triples((None,URIRef('http://test/d'),None))))==1 #context tests #test contexts with triple argument assert len(list(universe.contexts((a,d,c))))==1 #Remove test cases universe.remove((None,implies,None)) assert len(list(universe.triples((None,implies,None))))==0 assert len(list(formulaA.triples((None,None,None))))==2 assert len(list(formulaB.triples((None,None,None))))==2 formulaA.remove((None,b,None)) assert len(list(formulaA.triples((None,None,None))))==1 formulaA.remove((None,RDF.type,None)) assert len(list(formulaA.triples((None,None,None))))==0 universe.remove((None,RDF.type,RDFS.Class)) 
#remove_context tests universe.remove_context(formulaB) assert len(list(universe.triples((None,RDF.type,None))))==0 assert len(universe)==1 assert len(formulaB)==0 universe.remove((None,None,None)) assert len(universe)==0 g.store.destroy(configString) except: g.store.destroy(configString) raise rdflib-2.4.2/test/JSON.pyc0000644000175000017500000000510111164176136014173 0ustar nachonachoÑò ¯Ic@s’ddklZlZddklZddklZddkZdZdZdZ dZ d ei fd „ƒYZ e d joeiƒndS( iÿÿÿÿ(tConjunctiveGraphtplugin(tStore(tStringIONs) @prefix foaf: . @prefix rdf: . foaf:name "Alice" . foaf:knows . foaf:name "Bob" . s PREFIX foaf: SELECT ?name ?x ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } } s¢"name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"}, "x" : {"type": "uri", "value" : "http://example.org/bob"} }s PREFIX foaf: SELECT ?name ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } } tJSONcBs#eZd„Zd„Zd„ZRS(cCs>ttidtƒƒƒ|_|iittƒddƒdS(NtIOMemorytformattn3(RRtgetRtgraphtparseRt test_data(tself((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/JSON.pytsetUp*scCsD|iitƒ}|iddƒ}|i|itƒdjƒdS(sˆ Verify the serialisation of the data as json contains an exact substring, with the comma in the correct place. RtjsoniN(R tqueryt test_queryt serializet failUnlesstfindtcorrect(R tresultst result_json((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/JSON.pyt testComma.scCsD|iitƒ}|iddƒ}|i|idƒdjƒdS(sW Verify that the "x", substring is omitted from the serialised output. 
RRs"x",iÿÿÿÿN(R Rttest_header_queryRRR(R RR((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/JSON.pyt testHeader7s(t__name__t __module__R RR(((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/JSON.pyR(s  t__main__(trdflibRRt rdflib.storeRRtunittestR RRRtTestCaseRRtmain(((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/JSON.pyts    rdflib-2.4.2/test/sparql_order_by.pyc0000644000175000017500000000324011164176137016614 0ustar nachonachoÑò ¯Ic@sŒddklZlZlZddklZddklZddkZdZdZ dei fd„ƒYZ e d joei ƒndS( iÿÿÿÿ(tConjunctiveGraphtplugintLiteral(tStore(tStringIONsP @prefix foaf: . @prefix rdf: . foaf:name "Bob" . foaf:name "Dave" . foaf:name "Alice" . foaf:name "Charlie" . sg PREFIX foaf: SELECT ?name WHERE { ?x foaf:name ?name . } ORDER BY ?name t TestOrderBycBseZd„ZRS(c Csšttidtƒƒƒ}|ittƒddƒ|itƒ}|i t g}t |ddddgƒD]\}}||d|jql~jƒdS( NtIOMemorytformattn3tAlicetBobtCharlietDavei( RRtgetRtparseRt test_datatqueryt test_queryt failUnlesstFalsetzip(tselftgraphtresultst_[1]trta((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_order_by.pyt testOrderBys(t__name__t __module__R(((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_order_by.pyRst__main__(trdflibRRRt rdflib.storeRRtunittestRRtTestCaseRRtmain(((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_order_by.pyts  rdflib-2.4.2/test/test_sparql_filter_bound.py0000644000175000017500000000164111153616026020354 0ustar nachonachofrom rdflib import Literal, ConjunctiveGraph, Namespace, BNode, URIRef DC = Namespace(u"http://purl.org/dc/elements/1.1/") FOAF = Namespace(u"http://xmlns.com/foaf/0.1/") graph = ConjunctiveGraph() s = BNode() graph.add((s, FOAF['givenName'], Literal('Alice'))) b = BNode() graph.add((b, FOAF['givenName'], Literal('Bob'))) graph.add((b, DC['date'], Literal("2005-04-04T04:04:04Z"))) def test_bound(): res = graph.query("""PREFIX foaf: PREFIX dc: PREFIX xsd: SELECT ?name WHERE { ?x foaf:givenName ?name . OPTIONAL { ?x dc:date ?date } . 
FILTER ( bound(?date) ) }""").serialize('python') expected = [Literal('Bob', lang=None, datatype=None)] assert res == expected, "Expected %s but got %s" % (expected, res) if __name__ == '__main__': test_bound() rdflib-2.4.2/test/mysql.pyc0000644000175000017500000000756511164176136014607 0ustar nachonachoÑò ¯Ic @sÐddklZlZlZddklZyddklZWnej oZ dGe GHnXddk TdZ d„Z e e _d„Ze e_d „Ze e_ed joed e ƒe ƒnd S( iÿÿÿÿ(t testN3StorettestN3timplies(t QuotedGraph(t REGEXTermsCan not test REGEX bits:(t*s&user=,password=,host=localhost,db=testc CsGtddƒ}|itƒ|ittƒddƒyñx5|idtdfƒD]\}}}|}|}qQWt |ƒt jot |ƒt jpt ‚t dƒ}t dƒ}t dƒ}t dƒ} t |iƒ} tt| idtd ƒdfƒƒƒd jpt ‚tt| idtd ƒdfƒƒƒd jpt ‚tt| itd ƒddfƒƒƒd jpt ‚tt| iddtd ƒfƒƒƒd jpt ‚tt| idtdƒdfƒƒƒd jpt ‚xW| idtdƒdfƒD]7\}}}||jpt ‚||jpt ‚qWxS|idtdƒdfƒD]3\}}}||jpt|tƒpt ‚qmWtt| iddtdƒfƒƒƒd jpt ‚tt| idtd ƒdfƒƒƒd jpt ‚tt|iiti ti|gƒƒƒd jpt ‚x@|iiti ti|gƒD]} t| tƒpt ‚qqWtt|iittd ƒgƒƒƒd jpt ‚x@|iit|tigƒD] } | i|ijpt ‚qéWtt|iitd ƒ||gƒƒƒdjpt ‚tt|iid||gƒƒƒdjpt ‚tt|iid||gƒƒƒdjpt ‚tt|iitdƒ| gdƒƒƒdjpt ‚tt|iidti ƒƒƒd jpt ‚tt|ii|| ti gƒƒƒd jpt ‚tt|ii|| gƒƒƒd jpt ‚tt|ii|dƒƒƒd jpt ‚tt|ii|td ƒgƒƒƒd jpt ‚tt|ii||gdƒƒƒd jpt ‚Wn|iitƒ‚nXdS(NtbackendtMySQLtformattn3s http://test/as http://test/bs http://test/cs http://test/ds.*22-rdf-syntax-ns.*is.*is .*formula.*$s .*implies$s.*test.*s.*type.*s .*schema.*is.*rdf-syntax.*(tGraphtopent configStringtparsetStringInputSourceRttriplestNoneRttypeRtAssertionErrortURIReftConjunctiveGraphRtlentlistRt isinstancetBNodetcontextstsubjectstRDFtRDFStClasst identifiertobjectstdestroy( tgtstptotformulaAtformulaBtatbtctdtuniversetsubj((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/mysql.pyt testRegex sb  -    88888(88;"8;55;282/89cCstdtƒtƒdS(NR(RR R-(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/mysql.pyttestRunHs cCsddkl}l}|dƒ}|itƒ|iƒ|idƒ}|iƒ|idddƒ|i dƒ|i dƒ|i dƒdS(Niÿÿÿÿ(tProfiletstatssrdflib-mysql.profilettimet cumulativetpcallsgš™™™™™¹?( thotshotR/R0truncallR.tclosetloadt strip_dirst sort_statst print_statst print_callerst 
print_callees(R/R0R#R"((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/mysql.pyt profileTestsNs      t__main__RN(tn3_2RRRt rdflib.GraphRtrdflib.store.MySQLRt ImportErrortetrdflibR R-tTruetnon_standard_depR.R=t__name__(((s5/Users/eikeon/rdflib-svn/branches/2.4.x/test/mysql.pyts   <       rdflib-2.4.2/test/test_sparql_xml_results.pyc0000644000175000017500000000662411164177226020440 0ustar nachonachoÑò ¯Ic@søddklZddklZddkZddkZdZdZedZy,ddkl Z dd d d d d gZ Wn(e j odddddgZ nXdei fd„ƒYZ eideiƒd„ZedjoeiƒndS(iÿÿÿÿ(tConjunctiveGraph(tStringIONsÓ @prefix rdf: . @prefix rdfs: . rdfs:label "Word"@en; rdf:value 1; rdfs:seeAlso [] . sž PREFIX rdf: PREFIX rdfs: PREFIX owl: s# SELECT ?s ?o WHERE { ?s ?p ?o . } (t MarkupWriteru> u2 u@ http://example.org/word u uf 1 u— http://example.org/word Word u?uZhttp://example.org/wordu'u€1http://example.org/wordWordtTestSparqlXmlResultscBs#eZd„Zd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtparseRt test_data(tself((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pytsetUpDs cCs|ittƒdS(N(t_query_result_containstquerytexpected_fragments(R ((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pyt testSimpleHscCsb|ii|ƒ}|iddƒ}t|ƒ}|GHx&|D]}|GH|i||jƒq<WdS(NRtxml(RR t serializet normalizet failUnless(R R t fragmentstresultst result_xmltfrag((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pyR Ks (t__name__t __module__R RR (((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pyRBs  s\s+cCs|id|ƒS(Nt (tsub(tstexp((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pyRUst__main__(trdflibRRtretunittestRtPROLOGUER tFt.XmlRR t ImportErrortTestCaseRtcompilet MULTILINERRtmain(((sG/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_xml_results.pyts2     rdflib-2.4.2/test/__init__.py0000644000175000017500000000000211153616026015004 0ustar nachonacho# rdflib-2.4.2/test/triple_store.pyc0000644000175000017500000000361611164176137016147 0ustar 
nachonachoÑò ¯Ic@svddkZddklZlZlZlZddklZdeifd„ƒYZ e djoei ƒndS(iÿÿÿÿN(tURIReftBNodetLiteraltRDFS(tGrapht GraphTestcBsAeZdZdZd„Zd„Zd„Zd„Zd„ZRS(tdefaulttstorecCs]td|iƒ|_|ii|iƒtƒtitdƒf|_ |ii |i ƒdS(NRt remove_me( RtbackendRtopentpathRRtlabelRRtadd(tself((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pytsetUp scCs|iiƒdS(N(Rtclose(R((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyttearDownscCs/tƒ}|ii|titdƒfƒdS(Ntfoo(RRR RR R(Rtsubject((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyttestAdds cCs'|ii|iƒ|iidƒdS(N(NNN(RtremoveRtNone(R((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyt testRemovescCs!x|iD]\}}}q WdS(N(R(Rtstpto((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyt testTripless ( t__name__t __module__R R RRRRR(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyRs    t__main__( tunittesttrdflibRRRRt rdflib.GraphRtTestCaseRRtmain(((s</Users/eikeon/rdflib-svn/branches/2.4.x/test/triple_store.pyts " rdflib-2.4.2/test/serializers/0000755000175000017500000000000011204354476015244 5ustar nachonachordflib-2.4.2/test/serializers/test_prettyxml.pyc0000644000175000017500000000675211164176136021102 0ustar nachonachoÑò ¯Ic@swddklZlZlZlZlZddklZddkl Z l Z l Z de fd„ƒYZ d„Z dS(iÿÿÿÿ(tConjunctiveGraphtURIReftLiteraltBNodetRDFS(tPrettyXMLSerializer(tSerializerTestBaset serializetserialize_and_loadtTestPrettyXmlSerializercBs,eZeZdZdZd„Zd„ZRS(s @prefix rdfs: . @prefix owl: . @prefix : . :value rdfs:domain :Test . :Test rdfs:subClassOf [ a owl:Restriction; owl:onProperty :value ], [ a owl:Restriction; owl:onProperty :name ] . a :Test; rdfs:seeAlso ; :value "A" . :name "Bee"@en, "Be"@sv; :value "B" . a rdfs:Resource; rdfs:seeAlso ; :value 3 . a rdfs:Resource; rdfs:seeAlso ; rdfs:seeAlso ; rdfs:seeAlso . _:bnode1 a :BNode; rdfs:seeAlso _:bnode2 . _:bnode2 a :BNode ; rdfs:seeAlso _:bnode3 . _:bnode3 a :BNode ; rdfs:seeAlso _:bnode2 . tn3cCs€t|i|iƒ}d|jpt‚d|jpt‚d|jpt‚d|jpt‚d|jp td‚dS(Ns,s7sBeesH3s . 
@prefix owl: . @prefix : . :value rdfs:domain :Test . :Test rdfs:subClassOf [ a owl:Restriction; owl:onProperty :value ], [ a owl:Restriction; owl:onProperty :name ] . a :Test; rdfs:seeAlso ; :value "A" . :name "Bee"@en, "Be"@sv; :value "B" . a rdfs:Resource; rdfs:seeAlso ; :value 3 . a rdfs:Resource; rdfs:seeAlso ; rdfs:seeAlso ; rdfs:seeAlso . _:bnode1 a :BNode; rdfs:seeAlso _:bnode2 . _:bnode2 a :BNode ; rdfs:seeAlso _:bnode3 . _:bnode3 a :BNode ; rdfs:seeAlso _:bnode2 . """ testContentFormat = 'n3' def test_result_fragments(self): rdfXml = serialize(self.sourceGraph, self.serializer) assert '' in rdfXml assert '' in rdfXml assert 'Bee' in rdfXml assert '3' in rdfXml assert '' in rdfXml, onlyBNodesMsg #assert not '." % (type(o), p) rdflib-2.4.2/test/test_sparql_xml_results.py0000644000175000017500000000547111153616026020266 0ustar nachonachofrom rdflib import ConjunctiveGraph from StringIO import StringIO import re import unittest test_data = """ @prefix rdf: . @prefix rdfs: . rdfs:label "Word"@en; rdf:value 1; rdfs:seeAlso [] . """ PROLOGUE = """ PREFIX rdf: PREFIX rdfs: PREFIX owl: """ query = PROLOGUE+""" SELECT ?s ?o WHERE { ?s ?p ?o . 
} """ try: from Ft.Xml import MarkupWriter expected_fragments = [ u""" """, u""" """, u""" http://example.org/word """, u""" """, u""" 1 """, (u""" http://example.org/word """ """ Word """) ] except ImportError: expected_fragments = [ #u"""""", u"""""", u"""http://example.org/word""", u"""""", u"""1""", u"""http://example.org/wordWord""" ] # TODO: # - better canonicalization of results to compare with (4Suite-XML has support for this) # - test expected 'variable'-elems in head class TestSparqlXmlResults(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph() self.graph.parse(StringIO(test_data), format="n3") def testSimple(self): self._query_result_contains(query, expected_fragments) def _query_result_contains(self, query, fragments): results = self.graph.query(query) result_xml = results.serialize(format='xml') result_xml = normalize(result_xml) # TODO: poor mans c14n.. print result_xml for frag in fragments: print frag self.failUnless(frag in result_xml) def normalize(s, exp=re.compile(r'\s+', re.MULTILINE)): return exp.sub(' ', s) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/sparql_empty_prefix.pyc0000644000175000017500000000252111164176137017523 0ustar nachonachoÑò ¯Ic@svddklZddklZddkZdZdZdZdeifd„ƒYZe d joei ƒndS( iÿÿÿÿ(tConjunctiveGraph(tStringIONs˜ @prefix foaf: . @prefix rdf: . _:a foaf:name "Alice" . sPPREFIX : SELECT ?name WHERE { ?x :name ?name . 
}sD"name" : {"type": "literal", "xml:lang" : "None", "value" : "Alice"}tQuerycBseZd„ZRS(cCs]tƒ}|ittƒddƒ|itƒiddƒ}|i|it ƒdjƒdS(Ntformattn3tjsoni( RtparseRt test_datatqueryt test_queryt serializet failUnlesstfindtcorrect(tselftgrapht result_json((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_empty_prefix.pyt testQueryPluss (t__name__t __module__R(((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_empty_prefix.pyRst__main__( trdflibRRtunittestRR R tTestCaseRRtmain(((sC/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_empty_prefix.pyts  rdflib-2.4.2/test/test_sparql_filters.py0000644000175000017500000000227011153616026017347 0ustar nachonacho# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, URIRef, Literal from StringIO import StringIO testContent = """ @prefix rdfs: . rdfs:label "Document 1"@en, "Dokument 1"@sv . rdfs:label "Document 2"@en, "Dokument 2"@sv . rdfs:label "Document 3"@en, "Dokument 3"@sv . """ graph = ConjunctiveGraph() graph.load(StringIO(testContent), format='n3') doc1 = URIRef("http://example.org/doc/1") PROLOGUE = """ PREFIX rdfs: """ def test_filter_by_lang(): testdata = [ ("en", u'"Document 1"@en'), ("sv", u'"Dokument 1"@sv') ] query = PROLOGUE+''' SELECT ?label WHERE { '''+doc1.n3()+''' rdfs:label ?label . FILTER(LANG(?label) = "%s") } ''' for lang, literal in testdata: res = graph.query(query % lang) actual = [binding.n3() for binding in res.selected] expected = [literal] yield assert_equal, actual, expected def assert_equal(v1, v2): assert v1 == v2, "Expected %r == %s" % (v1, v2) rdflib-2.4.2/test/sparql_parser_nestedbrackets.pyc0000644000175000017500000000126311164176137021367 0ustar nachonachoÑò ¯Ic@shddklZdZdZedjo;eeƒZeiiiZe eƒejo dGHqdndS(iÿÿÿÿ(tParsesŽ PREFIX foaf: SELECT ?name ?mbox WHERE { { ?x foaf:name ?name . } { ?x foaf:mbox ?mbox . 
} } s]{ [] }t__main__tPASSEDN( trdflib.sparql.bisonRtquerytcorrectt__name__tpt whereClausetparsedGraphPatternttmptstr(((sL/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_parser_nestedbrackets.pyts   rdflib-2.4.2/test/rdfdiff.py0000644000175000017500000000372211153616026014665 0ustar nachonacho#!/usr/bin/env python """ RDF Graph Isomorphism Tester Author: Sean B. Palmer, inamidst.com Uses the pyrple algorithm Requirements: Python2.4+ http://inamidst.com/proj/rdf/ntriples.py Usage: ./rdfdiff.py """ import sys, re, urllib import ntriples from ntriples import bNode ntriples.r_uriref = re.compile(r'<([^\s"<>]+)>') class Graph(object): def __init__(self, uri=None, content=None): self.triples = set() if uri: self.parse(uri) elif content: self.parse_string(content) def parse(self, uri): class Sink(object): def triple(sink, s, p, o): self.triples.add((s, p, o)) p = ntriples.NTriplesParser(sink=Sink()) u = urllib.urlopen(uri) p.parse(u) u.close() def parse_string(self, content): class Sink(object): def triple(sink, s, p, o): self.triples.add((s, p, o)) p = ntriples.NTriplesParser(sink=Sink()) p.parsestring(content) def __hash__(self): return hash(tuple(sorted(self.hashtriples()))) def hashtriples(self): for triple in self.triples: g = ((isinstance(t, bNode) and self.vhash(t)) or t for t in triple) yield hash(tuple(g)) def vhash(self, term, done=False): return tuple(sorted(self.vhashtriples(term, done))) def vhashtriples(self, term, done): for t in self.triples: if term in t: yield tuple(self.vhashtriple(t, term, done)) def vhashtriple(self, triple, term, done): for p in xrange(3): if not isinstance(triple[p], bNode): yield triple[p] elif done or (triple[p] == term): yield p else: yield self.vhash(triple[p], done=True) def compare(p, q): return hash(Graph(p)) == hash(Graph(q)) def compare_from_string(p, q): return hash(Graph(content=p)) == hash(Graph(content=q)) def main(): result = compare(sys.argv[1], sys.argv[2]) print ('no', 'yes')[result] if __name__=="__main__": 
main() rdflib-2.4.2/test/advanced_sparql_constructs.py0000644000175000017500000000520311153616026020673 0ustar nachonachoimport unittest from rdflib.Namespace import Namespace from rdflib import plugin,RDF,RDFS,URIRef from rdflib.store import Store from cStringIO import StringIO from rdflib.Graph import Graph,ReadOnlyGraphAggregate,ConjunctiveGraph import sys from pprint import pprint testGraph1N3=""" @prefix rdf: . @prefix rdfs: . @prefix : . :foo :relatedTo [ a rdfs:Class ]; :parentOf ( [ a rdfs:Class ] ). :bar :relatedTo [ a rdfs:Resource ]; :parentOf ( [ a rdfs:Resource ] ). ( [ a rdfs:Resource ] ) :childOf :bar. ( [ a rdfs:Class ] ) :childOf :foo. """ sparqlQ1 = \ """ BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ?node :relatedTo [ a rdfs:Class ] }""" sparqlQ2 = \ """ BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ?node :parentOf ( [ a rdfs:Class ] ) }""" sparqlQ3 = \ """ BASE PREFIX rdf: PREFIX rdfs: SELECT ?node WHERE { ( [ a rdfs:Resource ] ) :childOf ?node }""" sparqlQ4 = \ """ PREFIX owl: SELECT DISTINCT ?class FROM WHERE { ?thing a ?class }""" class AdvancedTests(unittest.TestCase): def setUp(self): memStore = plugin.get('IOMemory',Store)() self.testGraph = Graph(memStore) self.testGraph.parse(StringIO(testGraph1N3),format='n3') def testNamedGraph(self): from sets import Set OWL_NS = Namespace("http://www.w3.org/2002/07/owl#") rt = self.testGraph.query(sparqlQ4) self.assertEquals(Set(rt.serialize('python')),Set([OWL_NS.OntologyProperty,OWL_NS.Class,OWL_NS.Ontology,OWL_NS.AnnotationProperty,RDF.Property,RDFS.Class])) def testScopedBNodes(self): rt = self.testGraph.query(sparqlQ1) self.assertEquals(rt.serialize('python')[0],URIRef("http://test/foo")) def testCollectionContentWithinAndWithout(self): rt = self.testGraph.query(sparqlQ3) self.assertEquals(rt.serialize('python')[0],URIRef("http://test/bar")) def testCollectionAsObject(self): rt = self.testGraph.query(sparqlQ2) self.assertEquals(rt.serialize('python')[0],URIRef("http://test/foo")) 
self.assertEquals(1,len(rt)) if __name__ == '__main__': suite = unittest.makeSuite(AdvancedTests) unittest.TextTestRunner(verbosity=3).run(suite)rdflib-2.4.2/test/BisonSPARQLParser/0000755000175000017500000000000011204354476016062 5ustar nachonachordflib-2.4.2/test/BisonSPARQLParser/test.py0000755000175000017500000002762411153616026017424 0ustar nachonachofrom rdflib.sparql.bison import Parse from rdflib import plugin, Namespace,URIRef, RDF from rdflib.store import Store, VALID_STORE, CORRUPTED_STORE, NO_STORE, UNKNOWN from rdflib.Graph import Graph, ConjunctiveGraph from sets import Set import os from cStringIO import StringIO from pprint import pprint EVALUATE = True DEBUG_PARSE = False STORE='IOMemory' configString = '' #class TestClassAndType(unittest.TestCase): # # def setUp(self): # # def tearDown(self): # # def testType(self): # # def testClass1(self): test = [ 'data/local-constr/expr-2.rq', #'data/examples/ex11.2.3.2_1.rq', #'data/TypePromotion/tP-unsignedByte-short.rq' #'data/examples/ex11.2.3.1_0.rq', #'data/ValueTesting/typePromotion-decimal-decimal-pass.rq', # 'data/examples/ex11.2.3.2_0.rq', # 'data/SyntaxFull/syntax-union-02.rq', #'data/part1/dawg-query-004.rq', ] tests2Skip = [ 'data/examples/ex11.2.3.1_1.rq',#Compares dateTime with same time, different time-zones 'data/examples/ex11_1.rq', #Compares with literal BNode labels! 
'data/SyntaxFull/syntax-bnodes-03.rq', #BNode as a predicate (not allowed by grammar) 'data/SyntaxFull/syntax-qname-04.rq', #Grammar Ambiguity with ':' matching as QNAME & QNAME_NS 'data/SyntaxFull/syntax-qname-05.rq', #Same as above 'data/SyntaxFull/syntax-qname-11.rq', #Same as above 'data/SyntaxFull/syntax-lit-10.rq' , #BisonGen's Lexer is chopping up STRING_LITERAL_LONG1 tokens 'data/SyntaxFull/syntax-lit-12.rq' , #same as above 'data/SyntaxFull/syntax-lit-14.rq' , #same as above 'data/SyntaxFull/syntax-lit-15.rq' , #same as above 'data/SyntaxFull/syntax-lit-16.rq' , #same as above 'data/SyntaxFull/syntax-lit-17.rq' , #same as above 'data/SyntaxFull/syntax-lit-20.rq' , #same as above 'data/unsaid-inference/query-01.rq' , #WHERE without '{ }' 'data/unsaid-inference/query-02.rq' , #same as above 'data/unsaid-inference/query-03.rq' , #same as above 'data/part1/dawg-query-001.rq' , #no space between variable name and }: .. OPTIONAL { ?person foaf:mbox ?mbox} 'data/part1/dawg-query-003.rq' , #Same as above 'data/regex/regex-query-003.rq' , #BisonGen's Lexer is chopping up STRING_LITERAL_LONG1 tokens 'data/regex/regex-query-004.rq' , #Same as above 'data/simple2/dawg-tp-01.rq' , #WHERE without '{ }' 'data/simple2/dawg-tp-02.rq' , #same as above 'data/simple2/dawg-tp-03.rq' , #same as above 'data/simple2/dawg-tp-04.rq' , #same as above 'data/SourceSimple/source-simple-01.rq', #WHERE without '{ }' 'data/SourceSimple/source-simple-02.rq', #Illegal syntax 'data/SourceSimple/source-simple-03.rq', #Illegal syntax 'data/SourceSimple/source-simple-04.rq', #Illegal syntax 'data/SourceSimple/source-simple-05.rq', #Illegal syntax 'data/source-named/query-8.1.rq', #WHERE without '{ }' 'data/source-named/query-8.2.rq', #same as above 'data/source-named/query-8.3.rq', #same as above 'data/source-named/query-8.4.rq', #same as above 'data/source-named/query-8.5.rq', #same as above 'data/source-named/query-9.1.rq', #same as above 'data/source-named/query-9.2.rq', #same as above 
'data/survey/query-survey-1.rq', #not sure if the VARNAME token includes ']'. If it does then the test is invalid 'data/survey/query-survey-9.rq', #same as above 'data/Sorting/one-of-one-column.rq' #same as above 'data/ValueTesting/dateTime-tz0.rq', #bad syntax 'data/Sorting/one-of-one-column.rq',#not sure if the VARNAME token includes ']'. If it does then the test is invalid 'data/ValueTesting/dateTime-tz0.rq',#bad syntax 'data/ValueTesting/dateTime-tz1.rq',#same as above 'data/ValueTesting/boolean-logical-OR.rq',#boolean literal is lowercase not uppercase 'data/ValueTesting/boolean-true-canonical.rq',#same as above 'data/ValueTesting/boolean-EBV-canonical.rq',#samve as above 'data/ValueTesting/boolean-equiv-TRUE.rq',#same as above 'data/ValueTesting/boolean-false-canonical.r',#same as above 'data/ValueTesting/boolean-false-canonical.rq',# 'data/ValueTesting/boolean-equiv-FALSE.rq',# 'data/ValueTesting/extendedType-ne-pass.rq',#[27] Constraint ::= 'FILTER' BrackettedExpression <-- 'data/examples/ex11_0.rq', #TimeZone info on xsd:dateTime 'data/local-constr/expr-2.rq', #Unable to deal with external filter against variable visible only to OPTIONAL ] MANIFEST_NS = Namespace('http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#') MANIFEST_QUERY_NS = Namespace('http://www.w3.org/2001/sw/DataAccess/tests/test-query#') TEST_BASE = Namespace('http://www.w3.org/2001/sw/DataAccess/tests/') RESULT_NS = Namespace('http://www.w3.org/2001/sw/DataAccess/tests/result-set#') manifestNS = { u"rdfs": Namespace("http://www.w3.org/2000/01/rdf-schema#"), u"mf" : Namespace("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#"), u"qt" : Namespace("http://www.w3.org/2001/sw/DataAccess/tests/test-query#"), } MANIFEST_QUERY = \ """ SELECT ?source ?testName ?testComment ?result WHERE { ?testCase mf:action ?testAction; mf:name ?testName; mf:result ?result. ?testAction qt:query ?query; qt:data ?source. 
OPTIONAL { ?testCase rdfs:comment ?testComment } }""" PARSED_MANIFEST_QUERY = Parse(MANIFEST_QUERY) def bootStrapStore(store): rt = store.open(configString,create=False) if rt == NO_STORE: store.open(configString,create=True) else: store.destroy(configString) store.open(configString,create=True) def trialAndErrorRTParse(graph,queryLoc,DEBUG): qstr = StringIO(open(queryLoc).read()) try: graph.parse(qstr,format='n3') return True except Exception, e: if DEBUG: print e print "#### Parse Failure (N3) ###" print qstr.getvalue() print "#####"*5 try: graph.parse(qstr) assert list(graph.objects(None,RESULT_NS.resultVariable)) return True except Exception, e: if DEBUG: print e print "#### Parse Failure (RDF/XML) ###" print qstr.getvalue() print "#### ######### ###" return False def testBasic(DEBUG = False): from glob import glob from sre import sub for testFile in glob('data/examples/*.rq'):#glob('data/*/*.rq'): store = plugin.get(STORE,Store)() bootStrapStore(store) store.commit() prefix = testFile.split('.rq')[-1] manifestPath = '/'.join(testFile.split('/')[:-1]+['manifest.n3']) manifestPath2 = '/'.join(testFile.split('/')[:-1]+['manifest.ttl']) queryFileName = testFile.split('/')[-1] store = plugin.get(STORE,Store)() store.open(configString,create=False) assert len(store) == 0 manifestG=ConjunctiveGraph(store) if not os.path.exists(manifestPath): assert os.path.exists(manifestPath2) manifestPath = manifestPath2 manifestG.default_context.parse(open(manifestPath),publicID=TEST_BASE,format='n3') manifestData = \ manifestG.query( PARSED_MANIFEST_QUERY, initBindings={'?query' : TEST_BASE[queryFileName]}, initNs=manifestNS, DEBUG = False) store.rollback() store.close() for source,testCaseName,testCaseComment,expectedRT in manifestData: if expectedRT: expectedRT = '/'.join(testFile.split('/')[:-1]+[expectedRT.replace(TEST_BASE,'')]) if source: source = '/'.join(testFile.split('/')[:-1]+[source.replace(TEST_BASE,'')]) testCaseName = testCaseComment and testCaseComment or 
testCaseName print "## Source: %s ##"%source print "## Test: %s ##"%testCaseName print "## Result: %s ##"%expectedRT #Expected results if expectedRT: store = plugin.get(STORE,Store)() store.open(configString,create=False) resultG=ConjunctiveGraph(store).default_context # if DEBUG: # print "###"*10 # print "parsing: ", open(expectedRT).read() # print "###"*10 assert len(store) == 0 print "## Parsing (%s) ##"%(expectedRT) if not trialAndErrorRTParse(resultG,expectedRT,DEBUG): if DEBUG: print "Unexpected result format (for %s), skipping"%(expectedRT) store.rollback() store.close() continue if DEBUG: print "## Done .. ##" rtVars = [rtVar for rtVar in resultG.objects(None,RESULT_NS.resultVariable)] bindings = [] resultSetNode = resultG.value(predicate=RESULT_NS.value,object=RESULT_NS.ResultSet) for solutionNode in resultG.objects(resultSetNode,RESULT_NS.solution): bindingDict = dict([(key,None) for key in rtVars]) for bindingNode in resultG.objects(solutionNode,RESULT_NS.binding): value = resultG.value(subject=bindingNode,predicate=RESULT_NS.value) name = resultG.value(subject=bindingNode,predicate=RESULT_NS.variable) bindingDict[name] = value bindings.append(tuple([bindingDict[vName] for vName in rtVars])) if DEBUG: print "Expected bindings: ", bindings print open(expectedRT).read() store.rollback() store.close() if testFile.startswith('data/NegativeSyntax'): try: query = open(testFile).read() p = Parse(query,DEBUG) except: continue else: raise Exception("Test %s should have failed!"%testFile) if testFile in tests2Skip: print "Skipping test (%s)"%testCaseName continue query = open(testFile).read() print "### %s (%s) ###"%(testCaseName,testFile) print query p = Parse(query,DEBUG_PARSE) if DEBUG: print p if EVALUATE and source: if DEBUG: print "### Source Graph: ###" print open(source).read() store = plugin.get(STORE,Store)() store.open(configString,create=False) g=ConjunctiveGraph(store) try: g.parse(open(source),format='n3') except: print "Unexpected data format (for 
%s), skipping"%(source) store.rollback() store.close() continue #print store rt = g.query(p,DEBUG = DEBUG) if expectedRT: if rt != bindings and Set([Set(i) for i in rt]) != Set([Set(i) for i in bindings]):#unorderedComparison(rt,bindings): print "### Expected Result (%s) ###"%expectedRT pprint(bindings) print "### Actual Results ###" pprint(rt) raise Exception("### TEST FAILED!: %s ###"%testCaseName) else: print "### TEST PASSED!: %s ###"%testCaseName store.rollback() if __name__ == '__main__': import sys if len(sys.argv) > 1: testBasic(bool(int(sys.argv[1]))) else: testBasic() # suite1 = unittest.makeSuite(TestClassAndType) # suite2 = unittest.makeSuite(TestReason) # unittest.TextTestRunner(verbosity=3).run(suite1) # unittest.TextTestRunner(verbosity=3).run(suite2) rdflib-2.4.2/test/BisonSPARQLParser/README.txt0000644000175000017500000000066711153616026017564 0ustar nachonachoThis harness is meant to work against the DAWG test cases: http://www.w3.org/2001/sw/DataAccess/tests/ The contents should be uncompressed to this directory and test.py should be run from the command line. Currently it only checks SPARQL parsing. I.e., it doesn't check against the results of evaluating the parsed expression, just checks that it parses it. Any exceptions that are thrown by the generated Bison parser will be thrown. 
rdflib-2.4.2/test/events.pyc0000644000175000017500000000666711164176136014750 0ustar nachonachoÑò ¯Ic@s±ddkZddklZdeifd„ƒYZdeifd„ƒYZd„Zd„Zd eifd „ƒYZ d ei fd „ƒYZ e d joei ƒndS(iÿÿÿÿN(teventst AddedEventcBseZRS((t__name__t __module__(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/events.pyRst RemovedEventcBseZRS((RR(((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/events.pyRscCs*|it|iƒ|it|iƒdS(N(t subscribeRt _add_handlerRt_remove_handler(tsourcettarget((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/events.pyt subscribe_to scCsDx=|D]5}x,|D]$}||jot||ƒqqWqWdS(N(R (tcachestcachetother((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/events.pyt subscribe_all s  tCachecBsJeZdd„Zd„Zd„Zd„Zd„Zd„Zd„Z RS(cCsJ|djo h}n||_|it|iƒ|it|iƒdS(N(tNonet_dataRRRRR(tselftdata((s6/Users/eikeon/rdflib-svn/branches/2.4.x/test/events.pyt__init__s   cCs|i|i|is    rdflib-2.4.2/test/test_sparql_naf2.pyc0000644000175000017500000000360011164177226016674 0ustar nachonachoÑò ¯Ic@sŽddklZlZlZlZddklZddkZdZedƒZdZ dei fd„ƒYZ e d joei ƒndS( iÿÿÿÿ(tConjunctiveGraphtURIReftLiteraltRDFS(tStringIONs @prefix foaf: . foaf:name "Chime"; a foaf:Person. foaf:knows ,. foaf:name "Ivan".shttp://eikeon.com/uÑ PREFIX foaf: SELECT ?X WHERE { ?P a foaf:Person . ?X foaf:knows ?P . OPTIONAL { ?X foaf:knows ?OP . ?OP foaf:name "Judas" } FILTER (!bound(?OP)) }tTestSparqlOPT_FILTER2cBseZd„Zd„ZRS(cCs,tƒ|_|iittƒddƒdS(Ntformattn3(RtgraphtloadRt testContent(tself((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf2.pytsetUps cCs`|iitdtƒiddƒ}t|ƒ}|i|tgjdtgt|ƒfƒdS(NtDEBUGRtpythonsexpecting : %s . 
Got: %s( RtquerytQUERYtFalset serializetlistt failUnlesstdoc1trepr(R tresults((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf2.pyttest_OPT_FILTER s   (t__name__t __module__R R(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf2.pyRs t__main__(trdflibRRRRRtunittestR RRtTestCaseRRtmain(((s@/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_sparql_naf2.pyts"   rdflib-2.4.2/test/test_not_equals.pyc0000644000175000017500000000272211164177226016642 0ustar nachonachoÑò ¯Ic@s›ddklZddklZlZlZlZlZlZddk l Z l Z l Z ddk Z ddklZd„Zedjo eƒndS(iÿÿÿÿ(t Namespace(tplugintRDFtRDFStURIReftStringInputSourcetLiteral(tGraphtReadOnlyGraphAggregatetConjunctiveGraphN(tpprintcCsd}tƒ}|itdtiƒddƒ|iddhtid6dtƒ}x2|D]*}|d }|td ƒjpt‚q[WdS( Nuhttp://example.org/sy @prefix : . @prefix rdf: <%s> . :foo rdf:value 1. :bar rdf:value 2.tformattn3s·SELECT ?node WHERE { ?node rdf:value ?val. FILTER (?val != 1) }tinitNstrdftDEBUGishttp://example.org/bar( R tparseRRtRDFNStquerytFalseRtAssertionError(tNStgraphtrttrowtitem((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_not_equals.pyttestSPARQLNotEqualss    t__main__(trdflib.NamespaceRtrdflibRRRRRRt rdflib.GraphRRR tsysR Rt__name__(((s?/Users/eikeon/rdflib-svn/branches/2.4.x/test/test_not_equals.pyts.   rdflib-2.4.2/test/test_literal.py0000644000175000017500000000067611153616026015761 0ustar nachonachoimport unittest import rdflib from rdflib import URIRef, Literal class TestRelativeBase(unittest.TestCase): def setUp(self): pass def test_repr_apostrophe(self): a = Literal("'") b = eval(repr(a)) self.assertEquals(a, b) def test_repr_quote(self): a = Literal('"') b = eval(repr(a)) self.assertEquals(a, b) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/JSON.py0000644000175000017500000000355311153616026014034 0ustar nachonachofrom rdflib import ConjunctiveGraph, plugin from rdflib.store import Store from StringIO import StringIO import unittest test_data = """ @prefix foaf: . @prefix rdf: . foaf:name "Alice" . 
foaf:knows . foaf:name "Bob" . """ test_query = """ PREFIX foaf: SELECT ?name ?x ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } } """ correct = """"name" : {"type": "literal", "xml:lang" : "None", "value" : "Bob"}, "x" : {"type": "uri", "value" : "http://example.org/bob"} }""" test_header_query = """ PREFIX foaf: SELECT ?name ?friend WHERE { ?x foaf:name ?name . OPTIONAL { ?x foaf:knows ?friend . } } """ # See Also: http://rdflib.net/pipermail/dev/2006-November/000112.html class JSON(unittest.TestCase): def setUp(self): self.graph = ConjunctiveGraph(plugin.get('IOMemory',Store)()) self.graph.parse(StringIO(test_data), format="n3") def testComma(self): """ Verify the serialisation of the data as json contains an exact substring, with the comma in the correct place. """ results = self.graph.query(test_query) result_json = results.serialize(format='json') self.failUnless(result_json.find(correct) > 0) def testHeader(self): """ Verify that the "x", substring is omitted from the serialised output. 
""" results = self.graph.query(test_header_query) result_json = results.serialize(format='json') self.failUnless(result_json.find('"x",') == -1) if __name__ == "__main__": unittest.main() rdflib-2.4.2/test/mysql.py0000644000175000017500000000656411153616026014435 0ustar nachonachofrom n3_2 import testN3Store,testN3,implies from rdflib.Graph import QuotedGraph try: from rdflib.store.MySQL import REGEXTerm except ImportError, e: print "Can not test REGEX bits:", e from rdflib import * configString="user=,password=,host=localhost,db=test" def testRegex(): g = Graph(backend='MySQL') g.open(configString) g.parse(StringInputSource(testN3), format="n3") try: for s,p,o in g.triples((None,implies,None)): formulaA = s formulaB = o assert type(formulaA)==QuotedGraph and type(formulaB)==QuotedGraph a = URIRef('http://test/a') b = URIRef('http://test/b') c = URIRef('http://test/c') d = URIRef('http://test/d') universe = ConjunctiveGraph(g.backend) #REGEX triple matching assert len(list(universe.triples((None,REGEXTerm('.*22-rdf-syntax-ns.*'),None))))==1 assert len(list(universe.triples((None,REGEXTerm('.*'),None))))==3 assert len(list(universe.triples((REGEXTerm('.*formula.*$'),None,None))))==1 assert len(list(universe.triples((None,None,REGEXTerm('.*formula.*$')))))==1 assert len(list(universe.triples((None,REGEXTerm('.*implies$'),None))))==1 for s,p,o in universe.triples((None,REGEXTerm('.*test.*'),None)): assert s==a assert o==c for s,p,o in formulaA.triples((None,REGEXTerm('.*type.*'),None)): assert o!=c or isinstance(o,BNode) #REGEX context matching assert len(list(universe.contexts((None,None,REGEXTerm('.*schema.*')))))==1 assert len(list(universe.contexts((None,REGEXTerm('.*'),None))))==3 #test optimized interfaces assert len(list(g.backend.subjects(RDF.type,[RDFS.Class,c])))==1 for subj in g.backend.subjects(RDF.type,[RDFS.Class,c]): assert isinstance(subj,BNode) assert len(list(g.backend.subjects(implies,[REGEXTerm('.*')])))==1 for subj in 
g.backend.subjects(implies,[formulaB,RDFS.Class]): assert subj.identifier == formulaA.identifier assert len(list(g.backend.subjects(REGEXTerm('.*'),[formulaB,c])))==2 assert len(list(g.backend.subjects(None,[formulaB,c])))==2 assert len(list(g.backend.subjects(None,[formulaB,c])))==2 assert len(list(g.backend.subjects([REGEXTerm('.*rdf-syntax.*'),d],None)))==2 assert len(list(g.backend.objects(None,RDF.type)))==1 assert len(list(g.backend.objects(a,[d,RDF.type])))==1 assert len(list(g.backend.objects(a,[d])))==1 assert len(list(g.backend.objects(a,None)))==1 assert len(list(g.backend.objects(a,[REGEXTerm('.*')])))==1 assert len(list(g.backend.objects([a,c],None)))==1 except: g.backend.destroy(configString) raise testRegex.non_standard_dep = True def testRun(): testN3Store('MySQL',configString) testRegex() testRun.non_standard_dep = True def profileTests(): from hotshot import Profile, stats p = Profile('rdflib-mysql.profile') p.runcall(testRun) p.close() s = stats.load('rdflib-mysql.profile') s.strip_dirs() s.sort_stats('time','cumulative','pcalls') #s.sort_stats('time','pcalls') s.print_stats(.1) s.print_callers(.1) s.print_callees(.1) profileTests.non_standard_dep = True if __name__=='__main__': testN3Store('MySQL',configString) testRegex() #profileTests() rdflib-2.4.2/test/util.pyc0000644000175000017500000000174711164176137014414 0ustar nachonachoÑò ¯Ic@sjddkZddklZddklZdeifd„ƒYZedjoeiddƒndS( iÿÿÿÿN(tLiteral(t NodePicklert UtilTestCasecBseZd„ZRS(cCsAtƒ}tdƒ}|i|i|ƒƒ}|i||ƒdS(NuHA test with a \n (backslash n), "©" , and newline and a second line. 
(RRtloadstdumpst assertEquals(tselftnptatb((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/util.pyt!test_to_bits_from_bits_round_trip s   (t__name__t __module__R (((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/util.pyRst__main__t defaultTestt test_suite( tunittesttrdflibRtrdflib.store.NodePicklerRtTestCaseRR tmain(((s4/Users/eikeon/rdflib-svn/branches/2.4.x/test/util.pyts  rdflib-2.4.2/test/sparql_parser_instability.pyc0000644000175000017500000000115611164176137020722 0ustar nachonachoÑò ¯Ic@s4dZd„Zee_edjo eƒndS(sL BASE . SELECT ?s WHERE { ?s ?p ?o }cCs$ddkl}|ƒitƒdS(Niÿÿÿÿ(tGraph(t rdflib.GraphRtqueryt BAD_SPARQL(R((sI/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_parser_instability.pyttest_bad_sparqlst__main__N(RRtTruetunstablet__name__(((sI/Users/eikeon/rdflib-svn/branches/2.4.x/test/sparql_parser_instability.pyts   rdflib-2.4.2/run_tests.py0000755000175000017500000001024611153616037014334 0ustar nachonacho#!/usr/bin/env python # -*- coding: utf-8 -*- """ Testing with Nose ================= This test runner uses Nose for test discovery and running. It uses the argument spec of Nose, but with some options pre-set. To begin with, make sure you have Nose installed, e.g.: $ sudo easy_install nose For daily test runs, use: $ ./run_tests.py If you supply attributes, the default ones defined in ``DEFAULT_ATTRS`` will be ignored. So to run e.g. all tests marked ``slowtest`` or ``non_standard_dep``, do: $ ./run_tests.py -a slowtest,non_standard_dep See for furher details. An excellent article is also available at . Note that this is just a convenience script. You can use ``nosetests`` directly if it's on $PATH, with the difference that you have to supply the options pre-set here manually. Coverage ======== If ``coverage.py`` is placed in $PYTHONPATH, it can be used to create coverage information (using the built-in coverage plugin of Nose) if the default option "--with-coverage" is supplied (which also enables some additional coverage options). See for details. 
""" NOSE_ARGS = [ '--where=./', '--with-doctest', '--doctest-extension=.doctest', '--doctest-tests', # '--with-EARL', ] COVERAGE_EXTRA_ARGS = [ '--cover-package=rdflib', '--cover-inclusive', ] DEFAULT_ATTRS = ['!slowtest', '!unstable', '!non_standard_dep'] DEFAULT_DIRS = ['test', 'rdflib'] if __name__ == '__main__': from sys import argv, exit, stderr try: import nose except ImportError: print >>stderr, """\ Requires Nose. Try: $ sudo easy_install nose Exiting. """; exit(1) if '--with-coverage' in argv: try: import coverage except ImportError: print >>stderr, "No coverage module found, skipping code coverage." argv.remove('--with-coverage') else: NOSE_ARGS += COVERAGE_EXTRA_ARGS if True not in [a.startswith('-a') or a.startswith('--attr=') for a in argv]: argv.append('--attr=' + ','.join(DEFAULT_ATTRS)) if not [a for a in argv[1:] if not a.startswith('-')]: argv += DEFAULT_DIRS # since nose doesn't look here by default.. finalArgs = argv + NOSE_ARGS print "Running nose with:", " ".join(finalArgs[1:]) nose.run(argv=finalArgs) # TODO: anything from the following we've left behind? old_run_tests = """ import logging _logger = logging.getLogger() _logger.setLevel(logging.ERROR) _formatter = logging.Formatter('%(name)s %(levelname)s %(message)s') _handler = logging.StreamHandler() _handler.setFormatter(_formatter) _logger.addHandler(_handler) import unittest, inspect import rdflib quick = True verbose = True from test.IdentifierEquality import IdentifierEquality from test.sparql.QueryTestCase import QueryTestCase from test.graph import * from test.triple_store import * from test.context import * # # Graph no longer has the type checking at the moment. Do we want to # # put it back? Should we? # # # # from test.type_check import * from test.parser import * if not quick: from test import parser_rdfcore if verbose: parser_rdfcore.verbose = 1 from test.parser_rdfcore import * from test.Sleepycat import * from test.rdf import * # how does this manage to be 9 tests? 
from test.n3 import * from test.n3_quoting import * from test.nt import * from test.util import * from test.seq import SeqTestCase #from test.store_performace import * from test.rules import * from test.n3Test import * from test.JSON import JSON import test.rdfa from test.events import * def run(): # TODO: Fix failed test and comment back in. # test.rdfa.main() if verbose: ts = unittest.makeSuite tests = [ c for c in vars().values() if inspect.isclass(c) and not isinstance(c, rdflib.Namespace) and issubclass(c, unittest.TestCase) ] suite = unittest.TestSuite(map(ts, tests)) unittest.TextTestRunner(verbosity=2).run(suite) else: unittest.main() """ rdflib-2.4.2/LICENSE0000644000175000017500000000310011153616037012725 0ustar nachonachoLICENSE AGREEMENT FOR RDFLIB 0.9.0 THROUGH 2.4.1 ------------------------------------------------ Copyright (c) 2002-2007, Daniel Krech, http://eikeon.com/ All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Daniel Krech nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. rdflib-2.4.2/README0000644000175000017500000000020411153616037012602 0ustar nachonacho See http://rdflib.net/2.4.1/ for the release notes. ... Also see http://rdflib.net/contributors/ for the list of contributors. rdflib-2.4.2/PKG-INFO0000644000175000017500000000242011204354476013024 0ustar nachonachoMetadata-Version: 1.0 Name: rdflib Version: 2.4.2 Summary: RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information. Home-page: http://rdflib.net/ Author: Daniel 'eikeon' Krech Author-email: eikeon@eikeon.com License: http://rdflib.net/latest/LICENSE Download-URL: http://rdflib.net/rdflib-2.4.2.tar.gz Description: RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information. The library contains parsers and serializers for RDF/XML, N3, NTriples, Turtle, TriX and RDFa . The library presents a Graph interface which can be backed by any one of a number of Store implementations, including, Memory, MySQL, Redland, SQLite, Sleepycat, ZODB and SQLObject. 
If you have recently reported a bug marked as fixed, or have a craving for the very latest, you may want the development version instead: http://svn.rdflib.net/trunk#egg=rdflib-dev Platform: any Classifier: Programming Language :: Python Classifier: License :: OSI Approved :: BSD License Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: OS Independent Classifier: Natural Language :: English rdflib-2.4.2/rdflib.egg-info/0000755000175000017500000000000011204354476014665 5ustar nachonachordflib-2.4.2/rdflib.egg-info/entry_points.txt0000644000175000017500000000016711204354475020166 0ustar nachonacho[nose.plugins] EARLPlugin = rdflib_tools.EARLPlugin:EARLPlugin [console_scripts] rdfpipe = rdflib_tools.RDFPipe:main rdflib-2.4.2/rdflib.egg-info/PKG-INFO0000644000175000017500000000242011204354475015757 0ustar nachonachoMetadata-Version: 1.0 Name: rdflib Version: 2.4.2 Summary: RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information. Home-page: http://rdflib.net/ Author: Daniel 'eikeon' Krech Author-email: eikeon@eikeon.com License: http://rdflib.net/latest/LICENSE Download-URL: http://rdflib.net/rdflib-2.4.2.tar.gz Description: RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information. The library contains parsers and serializers for RDF/XML, N3, NTriples, Turtle, TriX and RDFa . The library presents a Graph interface which can be backed by any one of a number of Store implementations, including, Memory, MySQL, Redland, SQLite, Sleepycat, ZODB and SQLObject. 
If you have recently reported a bug marked as fixed, or have a craving for the very latest, you may want the development version instead: http://svn.rdflib.net/trunk#egg=rdflib-dev Platform: any Classifier: Programming Language :: Python Classifier: License :: OSI Approved :: BSD License Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Operating System :: OS Independent Classifier: Natural Language :: English rdflib-2.4.2/rdflib.egg-info/dependency_links.txt0000644000175000017500000000000111204354475020732 0ustar nachonacho rdflib-2.4.2/rdflib.egg-info/SOURCES.txt0000644000175000017500000002377111204354476016563 0ustar nachonachoCHANGELOG LICENSE MANIFEST.in README ez_setup.py run_tests.py setup.cfg setup.py examples/example.py examples/swap_primer.py rdflib/BNode.py rdflib/Collection.py rdflib/FileInputSource.py rdflib/Graph.py rdflib/Identifier.py rdflib/Journal.py rdflib/Literal.py rdflib/Namespace.py rdflib/Node.py rdflib/QueryResult.py rdflib/RDF.py rdflib/RDFS.py rdflib/Statement.py rdflib/StringInputSource.py rdflib/TextIndex.py rdflib/TripleStore.py rdflib/URIRef.py rdflib/URLInputSource.py rdflib/Variable.py rdflib/__init__.py rdflib/compat.py rdflib/constants.py rdflib/events.py rdflib/exceptions.py rdflib/interfaces.py rdflib/plugin.py rdflib/term_utils.py rdflib/util.py rdflib.egg-info/PKG-INFO rdflib.egg-info/SOURCES.txt rdflib.egg-info/dependency_links.txt rdflib.egg-info/entry_points.txt rdflib.egg-info/top_level.txt rdflib/sparql/Algebra.py rdflib/sparql/Query.py rdflib/sparql/QueryResult.py rdflib/sparql/Unbound.py rdflib/sparql/__init__.py rdflib/sparql/graphPattern.py rdflib/sparql/parser.py rdflib/sparql/sparqlGraph.py rdflib/sparql/sparqlOperators.py rdflib/sparql/bison/Bindings.py rdflib/sparql/bison/Expression.py rdflib/sparql/bison/Filter.py rdflib/sparql/bison/FunctionLibrary.py rdflib/sparql/bison/GraphPattern.py rdflib/sparql/bison/IRIRef.py rdflib/sparql/bison/Operators.py 
rdflib/sparql/bison/Processor.py rdflib/sparql/bison/QName.py rdflib/sparql/bison/Query.py rdflib/sparql/bison/Resource.py rdflib/sparql/bison/SPARQLEvaluate.py rdflib/sparql/bison/SolutionModifier.py rdflib/sparql/bison/Triples.py rdflib/sparql/bison/Util.py rdflib/sparql/bison/__init__.py rdflib/store/AbstractSQLStore.py rdflib/store/AuditableStorage.py rdflib/store/BDBOptimized.py rdflib/store/BerkeleyDB.py rdflib/store/Concurrent.py rdflib/store/IOMemory.py rdflib/store/Memory.py rdflib/store/MySQL.py rdflib/store/NodePickler.py rdflib/store/PostgreSQL.py rdflib/store/REGEXMatching.py rdflib/store/Redland.py rdflib/store/SQLite.py rdflib/store/Sleepycat.py rdflib/store/ZODB.py rdflib/store/__init__.py rdflib/store/_sqlobject.py rdflib/store/FOPLRelationalModel/BinaryRelationPartition.py rdflib/store/FOPLRelationalModel/QuadSlot.py rdflib/store/FOPLRelationalModel/RelationalHash.py rdflib/store/FOPLRelationalModel/__init__.py rdflib/syntax/NamespaceManager.py rdflib/syntax/__init__.py rdflib/syntax/serializer.py rdflib/syntax/xml_names.py rdflib/syntax/parsers/N3Parser.py rdflib/syntax/parsers/NTParser.py rdflib/syntax/parsers/RDFXMLHandler.py rdflib/syntax/parsers/RDFXMLParser.py rdflib/syntax/parsers/RDFaParser.py rdflib/syntax/parsers/TriXHandler.py rdflib/syntax/parsers/TriXParser.py rdflib/syntax/parsers/__init__.py rdflib/syntax/parsers/ntriples.py rdflib/syntax/parsers/n3p/__init__.py rdflib/syntax/parsers/n3p/n3meta.py rdflib/syntax/parsers/n3p/n3p.py rdflib/syntax/parsers/n3p/n3proc.py rdflib/syntax/parsers/n3p/uripath.py rdflib/syntax/serializers/AbstractSerializer.py rdflib/syntax/serializers/N3Serializer.py rdflib/syntax/serializers/NTSerializer.py rdflib/syntax/serializers/PrettyXMLSerializer.py rdflib/syntax/serializers/QNameProvider.py rdflib/syntax/serializers/RecursiveSerializer.py rdflib/syntax/serializers/TurtleSerializer.py rdflib/syntax/serializers/XMLSerializer.py rdflib/syntax/serializers/XMLWriter.py rdflib/syntax/serializers/__init__.py 
rdflib_tools/EARLPlugin.py rdflib_tools/RDFPipe.py rdflib_tools/README rdflib_tools/__init__.py src/bison/README.txt src/bison/SPARQL.bgen src/bison/SPARQLLexerDefines.bgen.frag src/bison/SPARQLLexerPatterns.bgen.frag src/bison/SPARQLLiteralLexerPatterns.bgen.frag src/bison/SPARQLParser.c src/bison/SPARQLParser.ebnf src/bison/SPARQLParser.output src/bison/SPARQLTokens.bgen.frag src/bison/SPARQLTurtleSuperSet.bgen.frag test/IdentifierEquality.py test/IdentifierEquality.pyc test/JSON.py test/JSON.pyc test/Sleepycat.py test/Sleepycat.pyc test/__init__.py test/__init__.pyc test/a.n3 test/advanced_sparql_constructs.py test/advanced_sparql_constructs.pyc test/aggregate_graphs.py test/aggregate_graphs.pyc test/bdb_optimized.py test/bdb_optimized.pyc test/comparison_graph test/context.py test/context.pyc test/events.py test/events.pyc test/graph.py test/graph.pyc test/leaves.py test/leaves.pyc test/leaves.txt test/mysql.py test/mysql.pyc test/n3.py test/n3.pyc test/n3Test.py test/n3_2.py test/n3_2.pyc test/n3_quoting.py test/n3_quoting.pyc test/nt.py test/nt.pyc test/ntriples.py test/ntriples.pyc test/parser.py test/parser.pyc test/parser_rdfcore.py test/parser_rdfcore.pyc test/postgres.py test/rdf.py test/rdf.pyc test/rdf_lists.py test/rdfa.py test/rdfa.pyc test/rdfdiff.py test/rdfdiff.pyc test/rules.py test/rules.pyc test/seq.py test/seq.pyc test/sparql_empty_prefix.py test/sparql_empty_prefix.pyc test/sparql_limit.py test/sparql_limit.pyc test/sparql_order_by.py test/sparql_order_by.pyc test/sparql_parser_instability.py test/sparql_parser_instability.pyc test/sparql_parser_nestedbrackets.py test/sparql_parser_nestedbrackets.pyc test/sparql_regex.py test/sparql_regex.pyc test/store_performace.py test/store_performace.pyc test/test_bdb_transaction.py test/test_bdb_transaction.pyc test/test_datatype_encoding_mysql.py test/test_datatype_encoding_mysql.pyc test/test_datatype_parsing.py test/test_datatype_parsing.pyc test/test_datetime.py test/test_datetime.pyc 
test/test_empty_xml_base.py test/test_empty_xml_base.pyc test/test_issue_45.py test/test_issue_45.pyc test/test_literal.py test/test_literal.pyc test/test_not_equals.py test/test_not_equals.pyc test/test_sparql_advanced.py test/test_sparql_advanced.pyc test/test_sparql_advanced.txt test/test_sparql_base_ref.py test/test_sparql_base_ref.pyc test/test_sparql_date_filter.py test/test_sparql_date_filter.pyc test/test_sparql_equals.py test/test_sparql_equals.pyc test/test_sparql_filter_bound.py test/test_sparql_filter_bound.pyc test/test_sparql_filters.py test/test_sparql_filters.pyc test/test_sparql_graph_graph_pattern.py test/test_sparql_graph_graph_pattern.pyc test/test_sparql_graph_graph_pattern.py~ test/test_sparql_json_results.py test/test_sparql_json_results.pyc test/test_sparql_literal_patterns.py test/test_sparql_literal_patterns.pyc test/test_sparql_naf.py test/test_sparql_naf.pyc test/test_sparql_naf2.py test/test_sparql_naf2.pyc test/test_sparql_told_bnodes.py test/test_sparql_told_bnodes.pyc test/test_sparql_xml_results.py test/test_sparql_xml_results.pyc test/triple_store.py test/triple_store.pyc test/trix.py test/type_check.py test/type_check.pyc test/util.py test/util.pyc test/BisonSPARQLParser/README.txt test/BisonSPARQLParser/test.py test/broken_parse_test/n3-writer-test-02.n3 test/broken_parse_test/n3-writer-test-25.n3 test/broken_parse_test/n3-writer-test-27.n3 test/broken_parse_test/n3-writer-test-29.n3 test/broken_parse_test/rdf-test-01.n3 test/broken_parse_test/rdf-test-08.n3 test/broken_parse_test/rdf-test-10.n3 test/broken_parse_test/rdf-test-24.n3 test/broken_parse_test/rdf-test-25.n3 test/broken_parse_test/rdf-test-27.n3 test/n3/listTest.n3 test/n3/longString.n3 test/n3/longString.rdf test/n3/n3-writer-test-01.n3 test/n3/n3-writer-test-03.n3 test/n3/n3-writer-test-04.n3 test/n3/n3-writer-test-05.n3 test/n3/n3-writer-test-06.n3 test/n3/n3-writer-test-07.n3 test/n3/n3-writer-test-08.n3 test/n3/n3-writer-test-09.n3 test/n3/n3-writer-test-10.n3 
test/n3/n3-writer-test-11.n3 test/n3/n3-writer-test-12.n3 test/n3/n3-writer-test-13.n3 test/n3/n3-writer-test-14.n3 test/n3/n3-writer-test-15.n3 test/n3/n3-writer-test-16.n3 test/n3/n3-writer-test-17.n3 test/n3/n3-writer-test-18.n3 test/n3/n3-writer-test-19.n3 test/n3/n3-writer-test-20.n3 test/n3/n3-writer-test-21.n3 test/n3/n3-writer-test-22.n3 test/n3/n3-writer-test-23.n3 test/n3/n3-writer-test-24.n3 test/n3/n3-writer-test-26.n3 test/n3/n3-writer-test-28.n3 test/n3/rdf-test-02.n3 test/n3/rdf-test-03.n3 test/n3/rdf-test-04.n3 test/n3/rdf-test-05.n3 test/n3/rdf-test-06.n3 test/n3/rdf-test-07.n3 test/n3/rdf-test-09.n3 test/n3/rdf-test-11.n3 test/n3/rdf-test-12.n3 test/n3/rdf-test-13.n3 test/n3/rdf-test-14.n3 test/n3/rdf-test-15.n3 test/n3/rdf-test-16.n3 test/n3/rdf-test-17.n3 test/n3/rdf-test-18.n3 test/n3/rdf-test-19.n3 test/n3/rdf-test-20.n3 test/n3/rdf-test-21.n3 test/n3/rdf-test-22.n3 test/n3/rdf-test-23.n3 test/n3/rdf-test-26.n3 test/n3/rdf-test-28.n3 test/rdfa/000001.htm test/rdfa/000001.ttl test/rdfa/000002.htm test/rdfa/000002.ttl test/rdfa/000003.htm test/rdfa/000003.ttl test/rdfa/000004.htm test/rdfa/000004.ttl test/rdfa/000005.htm test/rdfa/000005.ttl test/rdfa/000006.htm test/rdfa/000006.ttl test/rdfa/000007.htm test/rdfa/000007.ttl test/rdfa/000008.htm test/rdfa/000008.ttl test/rdfa/000009.htm test/rdfa/000009.ttl test/rdfa/000010.htm test/rdfa/000010.ttl test/rdfa/000011.htm test/rdfa/000011.ttl test/rdfa/000012.htm test/rdfa/000012.ttl test/rdfa/000013.htm test/rdfa/000013.ttl test/rdfa/000014.htm test/rdfa/000014.ttl test/rdfa/000015.htm test/rdfa/000015.ttl test/rdfa/000016.htm test/rdfa/000016.ttl test/rdfa/000017.htm test/rdfa/000017.ttl test/rdfa/000018.htm test/rdfa/000018.ttl test/rdfa/000019.htm test/rdfa/000019.ttl test/rdfa/000020.htm test/rdfa/000020.ttl test/rdfa/000021.htm test/rdfa/000021.ttl test/rdfa/000022.htm test/rdfa/000022.ttl test/rdfa/000023.htm test/rdfa/000023.ttl test/serializers/__init__.py test/serializers/__init__.pyc 
test/serializers/test_prettyxml.py test/serializers/test_prettyxml.pyc test/sparql/QueryTestCase.py test/sparql/QueryTestCase.pyc test/sparql/README test/sparql/__init__.py test/sparql/__init__.pyc test/sparql/testSPARQL.py test/sparql/ConstructTests/Test10_21.py test/sparql/ConstructTests/Test10_22.py test/sparql/ConstructTests/Test10_23.py test/sparql/ConstructTests/constuctTest.py test/sparql/QueryTests/Test1.py test/sparql/QueryTests/Test11_3.py test/sparql/QueryTests/Test2_5.py test/sparql/QueryTests/Test2_6.py test/sparql/QueryTests/Test3_1_1.py test/sparql/QueryTests/Test3_1_2.py test/sparql/QueryTests/Test3_1_3.py test/sparql/QueryTests/Test3_1_4.py test/sparql/QueryTests/Test3_2.py test/sparql/QueryTests/Test5_1.py test/sparql/QueryTests/Test5_2.py test/sparql/QueryTests/Test5_3.py test/sparql/QueryTests/Test6_11.py test/sparql/QueryTests/Test6_12.py test/sparql/QueryTests/queryTest.py test/trix/aperture.trix test/trix/nokia_example.trixrdflib-2.4.2/rdflib.egg-info/top_level.txt0000644000175000017500000000003111204354475017410 0ustar nachonachotest rdflib rdflib_tools rdflib-2.4.2/CHANGELOG0000644000175000017500000006143611204354211013140 0ustar nachonacho2009/05/18 RELEASE 2.4.2 (#58) Removed a stderr print inside the SQLite store implementation. Added PostgreSQL store implementation. Fixed an issue where additional arguments were not always being passed to the serializers. Added an include_base option to the XMLSerializer. 2009/03/30 RELEASE 2.4.1 Fixed Literal comparison case involving Literal's with datatypes of XSD.base64Binary. Fixed case where XSD.date was matching before XSD.dateTime for datetime instances. Fixed jython interoperability issue (issue #53). Fixed Literal repr to handle apostrophes correctly (issue #28). Fixed Literal's repr to be consistent with its __init__ (issue #33). 2007/04/04 RELEASE 2.4.0 Improved Literal comparison / equality Sparql cleanup. getLiteralValue now returns the Literal object instead of the result of toPython(). 
Now that Literals override a good coverage of comparison operators, they should be passed around as first class objects in the SPARQL evaluation engine. Added support for session bnodes re: sparql Fixed prolog reduce/reduce conflict. Added Py_None IncRefs where they were being passed into Python method invokations (per drewp's patch) Fixed sparql queries involving empty namespace prefix. Fixed the selected variables sparql issue Fixed support in SPARQL queries. Fixed involving multiple unions and queries are nested more than one level (bug in _getAllVariables causing failure when parent.top is None) Fixed test_sparql_equals.py. Fixed sparql json result comma errors issue. Fixed test_sparql_json_results.py (SELECT * variables out of order) Added a 4Suite-based SPARQL XML Writer implementation. If 4Suite is not installed, the fallback python saxutils is used instead applied patch from http://rdflib.net/issues/2007/02/23/bugs_in_rdflib.sparql.queryresult/issue The restriction on GRAPH patterns with variables has been relieved a bit to allow such usage when the variable is provided as an initial binding Fix for OPTIONAL patterns. P1 OPT P2, where P1 and P2 shared variables which were bound to BNodes were not unifying on these BNode variable efficiently / correctly. The fix was to add bindings for 'stored' BNodes so they aren't confused for wildcards Added support to n3 parser for retaining namespace bindings. Fixed several RDFaParser bugs. Added serializer specific argument support. Fixed a few PrettyXMLSerializer issues and added a max_depth option. Fixed some TurtleSerializer issues. Fixed some N3Serializer issues. Added support easy_install added link to long_descriptin for easy_install -U rdflib==dev to work; added download_url back added continuous-releases-using-subversion bit Added rdflib_tools package Added rdfpipe Added initial EARLPluging Improved test running... using nose... added tests Exposed generated test cases for nose to find. 
added bit to configure 'setup.py nosetests' to run doc tests added nose test bits Added md5_term_hash method to terms. Added commit_pending_transaction argument to Graph's close method. Added DeprecationWarning to rdflib.constants Added a NamespaceDict class for those who want to avoid the Namespace as subclass of URIRef issues Added bind function Fixed type of Namespace re: URIRef vs. unicode Improved ValueError message Changed value method's any argument to default to True Changed __repr__ to always reflect that it's an rdf.Literal -- as this is the case even though we now have it acting like the corresponding type in some casses A DISTINCT was added to the SELECT clause to ensure duplicate triples are not returned (an RDF graph is a set of triples) - which can happen for certain join expressions. Support for ConditionalAndExpressionList and RelationalExpressionList (|| and && operators in FILTER) Fixed context column comparison. The hash integer was being compared with 'F' causing a warning:Warning: Truncated incorrect DOUBLE value: 'F' applied patch in http://rdflib.net/issues/2006/12/13/typos_in_abstractsqlstore.py/issue fix for http://rdflib.net/issues/2006/12/07/problems_with_graph.seq()_when_sequences_contain_more_than_9_items./issue General code cleanup (removing redundant imports, changing relative imports to absolute imports etc) Removed usage of deprecated bits. Added a number of test cases. Added DeprecationWarning for save method refactoring of GraphPattern ReadOnlyGraphAggregate uses Graph constructor properly to setup (optionally) a common store Fixed bug with . (fullstop) in localname parts. Changed Graph's value method to return None instead of raising an AssertionError. Fixed conversion of (exiplicit) MySQL ports to integers. 
Fixed MySQL store so it properly calculates __len__ of individual Graphs Aligned with how Sleepycat is generating events (remove events are expressed in terms of interned strings) Added code to catch unpickling related exceptions Added BerkeleyDB store implementation. Merged TextIndex from michel-events branch. 2006/10/15 RELEASE 2.3.3 Added TriXParser, N3Serializer and TurtleSerializer. Added events to store interface: StoreCreated, TripleAdded and TripleRemoved. Added Journal Reader and Writer. Removed Sleepycat level journaling. Added support for triple quoted Literal's. Fixed some corner cases with Literal comparison. Fixed PatternResolution for patterns that return contexts only. Fixed NodePickler not to choke on unhashable objects. Fixed Namespace's __getattr__ hack to ignore names starting with __ Added SPARQL != operator. Fixed query result __len__ (more efficient). Fixed and improved RDFa parser. redland patches from http://rdflib.net/pipermail/dev/2006-September/000069.html various patches for the testsuite - http://rdflib.net/pipermail/dev/2006-September/000069.html 2006/08/01 RELEASE 2.3.2 Added SPARQL query support. Added XSD to/from Python datatype support to Literals. Fixed ConjunctiveGraph so that it is a proper subclass of Graph. Added Deprecation Warning when BackwardCompatGraph gets used. Added RDFa parser. Added Collection Class for working with RDF Collections. Added method to Graph for testing connectedness Fixed bug in N3 parser where identical BNodes were not being combined. Fixed literal quoting in N3 serializer. Fixed RDF/XML serializer to skip over N3 bits. Changed Literal and URIRef instanciation to catch UnicodeDecodeErrors - which were being thrown when the default decoding method (ascii) was hitting certain characters. Changed Graph's bind method to also override the binding in the case of an existing generated bindings. 
Added FOPLRelationalModel - a set of utility classes that implement a minimal Relational Model of FOPL implemented as a SQL database (uses identifier/value interning and integer half-md5-hashes for space and index efficiency). Changed MySQL store to use FOPLRelationalModel plus fixes and improvements. Added more test cases. Cleaned up source code to follow pep8 / pep257. 2006/02/27 RELEASE 2.3.1 Added save method to BackwardCompatibleGraph so that example.py etc work again. Applied patch from Drew Perttula to add local_time_zone argument to util's date_time method. Fixed a relativize bug in the rdf/xml serializer. Fixed NameError: global name 'URIRef' is not defined error in Sleepycat.py by adding missing import. Applied patch for Seq to sort list by integer, added by Drew Hess. Added a preserve_bnode_ids option to rdf/xml parser. Applied assorted patches for tests (see http://tracker.asemantics.com/rdflib/ticket/8 ) Applied redland.diff (see http://tracker.asemantics.com/rdflib/ticket/9 ) Applied changes specified http://tracker.asemantics.com/rdflib/ticket/7 Added a set method to Graph. Fixed RDF/XML serializer so that it does not choke on n3 bits (rather it'll just ignore them) 2005/12/23 RELEASE 2.3.0 See http://rdflib.net/2.3.0/ for most up-to-date release notes Added N3 support to Graph and Store. Added Sean's n3p parser, and ntriples parser. Sleepycat implementation has been revamped in the process of expanding it to support the new requirements n3 requirements. It also now persists a journal -- more to come. detabified source files. Literal and parsers now distinguish between datatype of None and datatype of "". Store-agnostic 'fallback' implementation of REGEX matching (inefficient but provides the capability to stores that don't support it natively). Implemented as a 'wrapper' around any Store which replaces REGEX terms with None (before dispatching to the store) and whittles out results that don't match the given REGEX term expression(s). 
Store-agnostic 'fallback' implementation of transactional rollbacks (also inefficient but provides the capablity to stores that don't support it natively). Implemented as a wrapper that tracks a 'thread-safe' list of reversal operations (for every add, track the remove call that reverts the store, and vice versa). Upon store.rollback(), execute the reverse operations. However, this doesn't guarantee durability, since if the system fails before the rollbacks are all executed, the store will remain in an invalid state, but it provides Atomicity in the best case scenario. 2005/10/10 RELEASE 2.2.3 Fixed Sleepycat backend to commit after an add and remove. This should help just a bit with those unclean shutdowns ;) Fixed use of logging so that it does not mess with the root logger. Thank you, Arve, for pointing this one out. Fixed Graph's value method to have default for subject in addition to predicate and object. Fixed Fourthought backend to be consistent with interface. It now supports an empty constructor and an open method that takes a configuration string. 2005/09/10 RELEASE 2.2.2 Applied patch from inkel to add encoding argument to all serialization related methods. Fixed XMLSerializer bug regarding default namespace bindings. Fixed namespace binding bug involving binding a second default namespace. Applied patch from Gunnar AAstrand Grimnes to add context support to __iadd__ on Graph. (Am considering the lack of context support a bug. Any users currently using __iadd__, let me know if this breaks any of your code.) Added Fourthought backend contributed by Chimezie Ogbuji. Fixed a RDF/XML parser bug relating to XMLLiteral and escaping. Fixed setup.py so that install does not try to uninstall (rename_old) before installing; there's now an uninstall command if one needs to uninstall. 2005/08/25 RELEASE 2.2.1 Fixed issue regarding Python2.3 compatibility. Fixed minor issue with URIRef's absolute method. 
2005/08/12 RELEASE 2.1.4 Added optional base argument to URIRef. Fixed bug where load and parse had inconsistent behavior. Added a FileInputSource. Added skeleton sparql parser and test framework. Included pyparsing (pyparsing.sourceforge.net) for sparql parsing. Added attribute support to namespaces. 2005/06/28 RELEASE 2.1.3 Added Ivan's sparql-p implementation. Literal is now picklable. Added optional base argument to serialize methods about which to relativize. Applied patch to remove some dependencies on Python 2.4 features. Fixed BNode's n3 serialization bug (recently introduced). Fixed a collections related bug. 2005/05/13 RELEASE 2.1.2 Added patch from Sidnei da Silva that adds a sqlobject based backend. Fixed bug in PrettyXMLSerializer (rdf prefix decl was missing sometimes) Fixed bug in RDF/XML parser where empty collections where causing exceptions. 2005/05/01 RELEASE 2.1.1 Fixed a number of bugs relating to 2.0 backward compatibility. Fixed split_uri to handle URIs with _ in them properly. Fixed bug in RDF/XML handler's absolutize that would cause some URIRefs to end in ## Added check_context to Graph. Added patch the improves IOMemory implementation. 2005/04/12 RELEASE 2.1.0 Merged TripleStore and InformationStore into Graph. Added plugin support (or at least cleaned up, made consistent the plugin support that existed). Added value and seq methods to Graph. Renamed prefix_mapping to bind. Added namespaces method that is a generator over all prefix, namespace bindings. Added notion of NamespaceManager. Added couple new backends, IOMemory and ZODB. 2005/03/19 RELEASE 2.0.6 Added pretty-xml serializer (inlines BNodes where possible, typed nodes, Collections). Fixed bug in NTParser and n3 methods where not all characters where being escaped. Changed label and comment methods to return default passed in when there is no label or comment. Moved methods to Store Class. Store no longer inherits from Schema. 
Fixed bug involving a case with rdf:about='#' Changed InMemoryBackend to update third index in the same style it does the first two. 2005/01/08 RELEASE 2.0.5 Added publicID argument to Store's load method. Added RDF and RDFS to top level rdflib package. 2004/10/14 RELEASE 2.0.4 Removed unfinished functionality. Fixed bug where another prefix other than rdf was getting defined for the rdf namespace (causing an assertion to fail). Fixed bug in serializer where nodeIDs were not valid NCNames. 2004/04/21 RELEASE 2.0.3 Added missing "from __future__ import generators" statement to InformationStore. Simplified RDF/XML serializer fixing a few bugs involving BNodes. Added a reset method to RDF/XML parser. Changed 'if foo' to "if foo is not None" in a few places in the RDF/XML parser. Fully qualified imports in rdflib.syntax {parser, serializer}. Context now goes through InformationStore (was bypassing it going directly to backend). 2004/03/22 RELEASE 2.0.2 Improved performance of Identifier equality tests. Added missing "from __future__ import generators" statements needed to run on Python2.2. Added alternative to shlib.move() if it isn't present. Fixed bug that occured when specifying a backend to InformationStore's constructor. Fixed bug recently introduced into InformationStore's remove method. 2004/03/15 RELEASE 2.0.1 Fixed a bug in the SleepyCatBackend multi threaded concurrency support. (Tested fairly extensively under the following conditions: multi threaded, multi process, and both). NOTE: fix involved change to database format -- so 2.0.1 will not be able to open databases created with 2.0.0 Removed the use of the Concurrent wrapper around InMemoryBackend and modified InMemoryBackend to handle concurrent requests. (Motivated by Concurrent's poor performance on bigger TripleStores.) Improved the speed of len(store) by making backends responsible for implementing __len__. Context objects now have a identifier property. 
2004/03/10 RELEASE 2.0.0 Fixed a few bugs in the SleepyCatBackend multi process concurrency support. Removed rdflib.Resource Changed remove to now take a triple pattern and removed remove_triples method. Added __iadd__ method to Store in support of store += another_store. 2004/01/04 RELEASE 1.3.2 Added a serialization dispatcher. Added format arg to save method. Store now remembers prefix/namespace bindings. Backends are now more pluggable ... 2003/10/14 RELEASE 1.3.1 Fixed bug in serializer where triples where only getting serialized the first time. Added type checking for contexts. Fixed bug that caused comparisons with a Literal to fail when the right hand side was not a string. Added DB_INIT_CDB flag to SCBacked for supporting multiple reader/single writer access Changed rdf:RDF to be optional to conform with latest spec. Fixed handling of XMLLiterals 2003/04/40 RELEASE 1.3.0 Removed bag_id support and added it to OLD_TERMS. Added a double hash for keys in SCBacked. Fixed _HTTPClient so that it no longer removes metadata about a context right after it adds it. Added a KDTreeStore and RedlandStore backends. Added a StoreTester. 2003/02/28 RELEASE 1.2.4 Fixed bug in SCBackend where language and datatype information where being ignored. Fixed bug in transitive_subjects. Updated some of the test cases that where not up to date. async_load now adds more http header and error information to the InformationStore. 2003/02/11 RELEASE 1.2.3 Fixed bug in load methods where relative URLs where not being absolutized correctly on Windows. Fixed serializer so that it throws an exception when trying to serialize a graph with a predicate that can not be split. 2003/02/07 RELEASE 1.2.2 Added an exists method to the BackwardCompatibility mixin. Added versions of remove, remove_triples and triples methods to the BackwardCompatility mixin for TripleStores that take an s, p, o as opposed to an (s, p, o). 2003/02/03 RELEASE 1.2.1 Added support for parsing XMLLiterals. 
Added support for proper charmod checking (only works in Python2.3). Fixed remaining rdfcore test cases that where not passing. Fixed windows bug in AbstractInformationStore's run method. 2003/01/02 RELEASE 1.2.0 Added systemID, line #, and column # to error messages. BNode prefix is now composed of ascii_letters instead of letters. Added a bsddb backed InformationStore. Added an asyncronous load method, methods for scheduling context updates, and a run method. 2002/12/16 RELEASE 1.1.5 Introduction of InformationStore, a TripleStore with the addition of context support. Resource __getitem__ now returns object (no longer returns a Resource for the object). Fixed bug in parser that was introduced in last release regaurding unqualified names. 2002/12/10 RELEASE 1.1.4 Interface realigned with last stable release. Serializer now uses more of the abbreviated forms where possible. Parser optimized and cleaned up. Added third index to InMemoryStore. The load and parse methods now take a single argument. Added a StringInputSource for to support parsing from strings. Renamed rdflib.BTreeTripleStore.TripleStore to rdflib.BTreeTripleStore.BTreeTripleStore. Minor reorganization of mix-in classes. 2002/12/03 RELEASE 1.1.3 BNodes now created with a more unique identifier so BNodes from different sessions do not collide. Added initial support for XML Literals (for now they are parsed into Literals). Resource is no longer a special kind of URIRef. Resource no longer looks at range to determine default return type for __getitem__. Instead there is now a get(predicate, default) method. 2002/11/21 RELEASE 1.1.2 Fixed Literal's __eq__ method so that Literal('foo')=='foo' etc. Fixed Resource's __setitem__ method so that it does not raise a dictionary changed size while iterating exception. 2002/11/09 RELEASE 1.1.1 Resource is now a special kind of URIRef Resource's __getitem__ now looks at rdfs:range to determine return type in default case. 
2002/11/05 RELEASE 1.1.0 # A new development branch Cleaned up interface and promoted it to SIR: Simple Interface for RDF. Updated parser to use SAX2 interfaces instead of using expat directly. Added BTreeTripleStore, a ZODB BTree TripleStore backend. And a default pre-mixed TripleStore that uses it. Synced with latest (Editor's draft) RDF/XML spec. Added datatype support. Cleaned up interfaces for load/parse: removed generate_path from loadsave and renamed parse_URI to parse. 2002/10/08 RELEASE 0.9.6 # The end of a development branch BNode can now be created with specified value. Literal now has a language attribute. Parser now creates Literals with language attribute set appropriately as determined by xml:lang attributes. TODO: Serializer-Literals-language attribute TODO: Change __eq__ so that Literal("foo")=="foo" etc TripleStores now support "in" operator. For example: if (s, p, o) in store: print "Found ", s, p, o Added APIs/object for working at level of a Resource. NOTE: This functionality is still experimental Consecutive Collections now parse correctly. 2002/08/06 RELEASE 0.9.5 Added support for rdf:parseType="Collection" Added items generator for getting items in a Collection Renamed rdflib.triple_store to rdflib.TripleStore to better follow python style conventions. Added an Identifier Class Moved each node into its own Python module. Added rdflib.util with a first and uniq function. Added a little more to example.py Removed generate_uri since we have BNodes now. 2002/07/29 RELEASE 0.9.4 Added support for proposed rdf:nodeID to both the parser and serializer. Reimplemented serializer which now nests things where possible. Added partial support for XML Literal parseTypes. 2002/07/16 RELEASE 0.9.3 Fixed bug where bNodes were being created for nested property elements when they were not supposed to be. Added lax mode that will convert rdf/xml files that contain bare IDs etc. Also, lax mode will only report parse errors instead of raising exceptions.
from rdflib.URIRef import URIRef

import logging

_logger = logging.getLogger(__name__)


class Namespace(URIRef):
    """A URI prefix whose attribute and item access mint terms in the namespace."""

    def term(self, name):
        """Return ``URIRef(self + name)``: `name` appended to the namespace URI."""
        return URIRef(self + name)

    def __getitem__(self, key, default=None):
        # `default` is accepted for call-site compatibility but never consulted.
        return self.term(key)

    def __getattr__(self, name):
        # Never resolve special Python names (``__copy__``, ``__deepcopy__``,
        # ...) as namespace terms: the interpreter probes for them and must
        # see AttributeError.
        if name.startswith("__"):
            raise AttributeError
        return self.term(name)


class NamespaceDict(dict):
    """A dict-backed namespace that caches minted terms and can warn when a
    term is not present in an associated context graph."""

    def __new__(cls, uri=None, context=None):
        instance = dict.__new__(cls)
        instance.uri = uri  # TODO: do we need to set these both here and in __init__ ??
        instance.__context = context
        return instance

    def __init__(self, uri, context=None):
        self.uri = uri
        self.__context = context

    def term(self, name):
        """Return (and cache) the URIRef for `name`; warn when the term is
        absent from the associated context graph."""
        uri = self.get(name)
        if uri is None:
            uri = URIRef(self.uri + name)
            if self.__context and (uri, None, None) not in self.__context:
                _logger.warning("%s not defined" % uri)
            self[name] = uri
        return uri

    def __getattr__(self, name):
        return self.term(name)

    def __getitem__(self, key, default=None):
        return self.term(key) or default

    def __str__(self):
        return self.uri

    def __repr__(self):
        return """rdflib.NamespaceDict('%s')""" % str(self.uri)


from urllib2 import urlopen, Request
from xml.sax.xmlreader import InputSource
from rdflib import __version__
from StringIO import StringIO


class StringInputSource(InputSource, object):
    """An ``xml.sax`` InputSource wrapping an in-memory string value."""

    def __init__(self, value, system_id=None):
        super(StringInputSource, self).__init__(system_id)
        self.setByteStream(StringIO(value))
        # TODO:
        # encoding = value.encoding
        # self.setEncoding(encoding)
""" def __init__(self, location=None, backend=None): if backend==None: backend = Memory() super(TripleStore, self).__init__(backend=backend) if location: self.load(location) def prefix_mapping(self, prefix, namespace): self.bind(prefix, namespace) rdflib-2.4.2/rdflib/FileInputSource.py0000644000175000017500000000050411153616035016617 0ustar nachonachofrom xml.sax.xmlreader import InputSource class FileInputSource(InputSource, object): def __init__(self, file): super(FileInputSource, self).__init__(`file`) self.file = file self.setByteStream(file) # TODO: self.setEncoding(encoding) def __repr__(self): return `self.file` rdflib-2.4.2/rdflib/URIRef.py0000644000175000017500000000460111153616035014635 0ustar nachonachofrom sys import version_info try: from hashlib import md5 except ImportError: from md5 import md5 if version_info[0:2] > (2, 2): from unicodedata import normalize else: normalize = None from urlparse import urlparse, urljoin, urldefrag from rdflib.Identifier import Identifier from rdflib.compat import rsplit class URIRef(Identifier): """ RDF URI Reference: http://www.w3.org/TR/rdf-concepts/#section-Graph-URIref """ __slots__ = () def __new__(cls, value, base=None): if base is not None: ends_in_hash = value.endswith("#") value = urljoin(base, value, allow_fragments=1) if ends_in_hash: if not value.endswith("#"): value += "#" #if normalize and value and value != normalize("NFC", value): # raise Error("value must be in NFC normalized form.") try: rt = unicode.__new__(cls,value) except UnicodeDecodeError: rt = unicode.__new__(cls,value,'utf-8') return rt def n3(self): return "<%s>" % self def concrete(self): if "#" in self: return URIRef("/".join(rsplit(self, "#", 1))) else: return self def abstract(self): if "#" not in self: scheme, netloc, path, params, query, fragment = urlparse(self) if path: return URIRef("#".join(rsplit(self, "/", 1))) else: if not self.endswith("#"): return URIRef("%s#" % self) else: return self else: return self def defrag(self): if 
"#" in self: url, frag = urldefrag(self) return URIRef(url) else: return self def __reduce__(self): return (URIRef, (unicode(self),)) def __getnewargs__(self): return (unicode(self), ) def __ne__(self, other): return not self.__eq__(other) def __eq__(self, other): if isinstance(other, URIRef): return unicode(self)==unicode(other) else: return False def __str__(self): return self.encode("unicode-escape") def __repr__(self): return """rdflib.URIRef('%s')""" % str(self) def md5_term_hash(self): d = md5(str(self)) d.update("U") return d.hexdigest() rdflib-2.4.2/rdflib/Journal.py0000644000175000017500000000444611153616035015162 0ustar nachonachoimport logging _logger = logging.getLogger(__name__) from rdflib.Graph import QuotedGraph from rdflib.events import Event, Dispatcher from rdflib.store import TripleAddedEvent, TripleRemovedEvent, StoreCreatedEvent class JournalWriter(object): """ Writes a journal of the store events. """ def __init__(self, store, stream=None, filename=None): if stream is None: assert filename, "Must specify either stream or filename" stream = file(filename, "ab") dispatcher = store.dispatcher dispatcher.subscribe(TripleAddedEvent, self.journal_event) dispatcher.subscribe(TripleRemovedEvent, self.journal_event) dispatcher.subscribe(StoreCreatedEvent, self.journal_event) self._dumps = store.node_pickler.dumps self._write = stream.write def journal_event(self, event): self._write(self._dumps(event)) self._write("\n\n") class JournalReader(object): """ Reads a journal of store events into a store. 
""" def __init__(self, store, filename): self.stream = file(filename, "rb") self.store = store dispatcher = Dispatcher() dispatcher.subscribe(TripleAddedEvent, self.add) dispatcher.subscribe(TripleRemovedEvent, self.remove) dispatcher.subscribe(StoreCreatedEvent, self.store_created) loads = store.node_pickler.loads dispatch = dispatcher.dispatch lines = [] for line in self.stream: if line=="\n": try: event = loads("".join(lines)) dispatch(event) lines = [] except Exception, e: _logger.exception(e) _logger.debug("lines: '%s'" % lines) lines = [] else: lines.append(line) def add(self, event): context = event.context quoted = isinstance(context, QuotedGraph) self.store.add(event.triple, context, quoted) def remove(self, event): self.store.remove(event.triple, event.context) def store_created(self, event): n = len(self.store) if n>0: _logger.warning("Store not empty for 'store created'. Contains '%s' assertions" % n) # TODO: clear store rdflib-2.4.2/rdflib/store/0000755000175000017500000000000011204354476014327 5ustar nachonachordflib-2.4.2/rdflib/store/SQLite.py0000644000175000017500000004471611171436362016054 0ustar nachonachofrom __future__ import generators from rdflib import BNode from rdflib.Literal import Literal from pprint import pprint try: from pysqlite2 import dbapi2 except ImportError: import warnings warnings.warn("pysqlite2 is not installed") __test__=False import sha,re,os from rdflib.term_utils import * from rdflib.Graph import QuotedGraph from rdflib.store.REGEXMatching import REGEXTerm, NATIVE_REGEX, PYTHON_REGEX from rdflib.store.AbstractSQLStore import * Any = None #User-defined REGEXP operator def regexp(expr, item): r = re.compile(expr) return r.match(item) is not None class SQLite(AbstractSQLStore): """ SQLite store formula-aware implementation. 
It stores it's triples in the following partitions: - Asserted non rdf:type statements - Asserted rdf:type statements (in a table which models Class membership) The motivation for this partition is primarily query speed and scalability as most graphs will always have more rdf:type statements than others - All Quoted statements In addition it persists namespace mappings in a seperate table """ context_aware = True formula_aware = True transaction_aware = True regex_matching = PYTHON_REGEX autocommit_default = False def open(self, home, create=True): """ Opens the store specified by the configuration string. If create is True a store will be created if it does not already exist. If create is False and a store does not already exist an exception is raised. An exception is also raised if a store exists, but there is insufficient permissions to open the store.""" if create: db = dbapi2.connect(os.path.join(home,self.identifier)) c=db.cursor() c.execute(CREATE_ASSERTED_STATEMENTS_TABLE%(self._internedId)) c.execute(CREATE_ASSERTED_TYPE_STATEMENTS_TABLE%(self._internedId)) c.execute(CREATE_QUOTED_STATEMENTS_TABLE%(self._internedId)) c.execute(CREATE_NS_BINDS_TABLE%(self._internedId)) c.execute(CREATE_LITERAL_STATEMENTS_TABLE%(self._internedId)) for tblName,indices in [ ( "%s_asserted_statements", [ ("%s_A_termComb_index",('termComb',)), ("%s_A_s_index",('subject',)), ("%s_A_p_index",('predicate',)), ("%s_A_o_index",('object',)), ("%s_A_c_index",('context',)), ], ), ( "%s_type_statements", [ ("%s_T_termComb_index",('termComb',)), ("%s_member_index",('member',)), ("%s_klass_index",('klass',)), ("%s_c_index",('context',)), ], ), ( "%s_literal_statements", [ ("%s_L_termComb_index",('termComb',)), ("%s_L_s_index",('subject',)), ("%s_L_p_index",('predicate',)), ("%s_L_c_index",('context',)), ], ), ( "%s_quoted_statements", [ ("%s_Q_termComb_index",('termComb',)), ("%s_Q_s_index",('subject',)), ("%s_Q_p_index",('predicate',)), ("%s_Q_o_index",('object',)), 
("%s_Q_c_index",('context',)), ], ), ( "%s_namespace_binds", [ ("%s_uri_index",('uri',)), ], )]: for indexName,columns in indices: c.execute("CREATE INDEX %s on %s (%s)"%(indexName%self._internedId,tblName%(self._internedId),','.join(columns))) c.close() db.commit() db.close() self._db = dbapi2.connect(os.path.join(home,self.identifier)) self._db.create_function("regexp", 2, regexp) if os.path.exists(os.path.join(home,self.identifier)): c = self._db.cursor() c.execute("SELECT * FROM sqlite_master WHERE type='table'") tbls = [rt[1] for rt in c.fetchall()] c.close() for tn in [tbl%(self._internedId) for tbl in table_name_prefixes]: if tn not in tbls: #The database exists, but one of the partitions doesn't exist return 0 #Everything is there (the database and the partitions) return 1 #The database doesn't exist - nothing is there #return -1 def destroy(self, home): """ FIXME: Add documentation """ db = dbapi2.connect(os.path.join(home,self.identifier)) c=db.cursor() for tblsuffix in table_name_prefixes: try: c.execute('DROP table %s'%tblsuffix%(self._internedId)) except: print "unable to drop table: %s"%(tblsuffix%(self._internedId)) #Note, this only removes the associated tables for the closed world universe given by the identifier print "Destroyed Close World Universe %s ( in SQLite database %s)"%(self.identifier,home) db.commit() c.close() db.close() os.remove(os.path.join(home,self.identifier)) def EscapeQuotes(self,qstr): """ Ported from Ft.Lib.DbUtil """ if qstr is None: return '' tmp = qstr.replace("\\","\\\\") tmp = tmp.replace('"', '""') tmp = tmp.replace("'", "\\'") return tmp #This is overridden to leave unicode terms as is #Instead of converting them to ascii (the default behavior) def normalizeTerm(self,term): if isinstance(term,(QuotedGraph,Graph)): return term.identifier elif isinstance(term,Literal): return self.EscapeQuotes(term) elif term is None or isinstance(term,(list,REGEXTerm)): return term else: return term #Where Clause utility Functions #The 
predicate and object clause builders are modified in order to optimize #subjects and objects utility functions which can take lists as their last argument (object,predicate - respectively) def buildSubjClause(self,subject,tableName): if isinstance(subject,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject'),[subject] elif isinstance(subject,list): clauseStrings=[] paramStrings = [] for s in subject: if isinstance(s,REGEXTerm): clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject') + " %s") paramStrings.append(self.normalizeTerm(s)) elif isinstance(s,(QuotedGraph,Graph)): clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s") paramStrings.append(self.normalizeTerm(s.identifier)) else: clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s") paramStrings.append(self.normalizeTerm(s)) return '('+ ' or '.join(clauseStrings) + ')', paramStrings elif isinstance(subject,(QuotedGraph,Graph)): return "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[self.normalizeTerm(subject.identifier)] else: return subject is not None and "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[subject] or None #Capable off taking a list of predicates as well (in which case sub clauses are joined with 'OR') def buildPredClause(self,predicate,tableName): if isinstance(predicate,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate'),[predicate] elif isinstance(predicate,list): clauseStrings=[] paramStrings = [] for p in predicate: if isinstance(p,REGEXTerm): clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate')) else: clauseStrings.append("%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s") paramStrings.append(self.normalizeTerm(p)) return '('+ ' or '.join(clauseStrings) + ')', paramStrings else: return predicate is not None and 
"%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s",[predicate] or None #Capable of taking a list of objects as well (in which case sub clauses are joined with 'OR') def buildObjClause(self,obj,tableName): if isinstance(obj,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object'),[obj] elif isinstance(obj,list): clauseStrings=[] paramStrings = [] for o in obj: if isinstance(o,REGEXTerm): clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object')) paramStrings.append(self.normalizeTerm(o)) elif isinstance(o,(QuotedGraph,Graph)): clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s") paramStrings.append(self.normalizeTerm(o.identifier)) else: clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s") paramStrings.append(self.normalizeTerm(o)) return '('+ ' or '.join(clauseStrings) + ')', paramStrings elif isinstance(obj,(QuotedGraph,Graph)): return "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[self.normalizeTerm(obj.identifier)] else: return obj is not None and "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[obj] or None def buildContextClause(self,context,tableName): context = context is not None and self.normalizeTerm(context.identifier) or context if isinstance(context,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.context'%tableName or 'context'),[context] else: return context is not None and "%s="%(tableName and '%s.context'%tableName or 'context')+"%s",[context] or None def buildTypeMemberClause(self,subject,tableName): if isinstance(subject,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.member'%tableName or 'member'),[subject] elif isinstance(subject,list): clauseStrings=[] paramStrings = [] for s in subject: clauseStrings.append("%s.member="%tableName+"%s") if isinstance(s,(QuotedGraph,Graph)): paramStrings.append(self.normalizeTerm(s.identifier)) else: 
paramStrings.append(self.normalizeTerm(s)) return '('+ ' or '.join(clauseStrings) + ')', paramStrings else: return subject and u"%s.member = "%(tableName)+"%s",[subject] def buildTypeClassClause(self,obj,tableName): if isinstance(obj,REGEXTerm): return " REGEXP (%s,"+" %s)"%(tableName and '%s.klass'%tableName or 'klass'),[obj] elif isinstance(obj,list): clauseStrings=[] paramStrings = [] for o in obj: clauseStrings.append("%s.klass="%tableName+"%s") if isinstance(o,(QuotedGraph,Graph)): paramStrings.append(self.normalizeTerm(o.identifier)) else: paramStrings.append(self.normalizeTerm(o)) return '('+ ' or '.join(clauseStrings) + ')', paramStrings else: return obj is not None and "%s.klass = "%tableName+"%s",[obj] or None def triples(self, (subject, predicate, obj), context=None): """ A generator over all the triples matching pattern. Pattern can be any objects for comparing against nodes in the store, for example, RegExLiteral, Date? DateRange? quoted table: _quoted_statements asserted rdf:type table: _type_statements asserted non rdf:type table: _asserted_statements triple columns: subject,predicate,object,context,termComb,objLanguage,objDatatype class membership columns: member,klass,context termComb FIXME: These union all selects *may* be further optimized by joins """ quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId c=self._db.cursor() parameters = [] if predicate == RDF.type: #select from asserted rdf:type partition and quoted table (if a context is specified) clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,context,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate: #Select from quoted partition 
(if context is specified), literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,context,True) parameters.extend(params) selects.append( ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ) ) elif predicate: #select from asserted non rdf:type partition (optionally), quoted partition (if context is speciied), and literal partition (optionally) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) if context is not None: clauseString,params = self.buildClause('quoted',subject,predicate, obj,context) parameters.extend(params) selects.append( ( quoted_table, 'quoted', clauseString, QUOTED_PARTITION ) ) 
q=self._normalizeSQLCmd(unionSELECT(selects,selectType=TRIPLE_SELECT_NO_ORDER)) self.executeSQL(c,q,parameters) #NOTE: SQLite does not support ORDER BY terms that aren't integers, so the entire result set must be iterated #in order to be able to return a generator of contexts tripleCoverage = {} result = c.fetchall() c.close() for rt in result: s,p,o,(graphKlass,idKlass,graphId) = extractTriple(rt,self,context) contexts = tripleCoverage.get((s,p,o),[]) contexts.append(graphKlass(self,idKlass(graphId))) tripleCoverage[(s,p,o)] = contexts for (s,p,o),contexts in tripleCoverage.items(): yield (s,p,o),(c for c in contexts) CREATE_ASSERTED_STATEMENTS_TABLE = """ CREATE TABLE %s_asserted_statements ( subject text not NULL, predicate text not NULL, object text not NULL, context text not NULL, termComb tinyint unsigned not NULL)""" CREATE_ASSERTED_TYPE_STATEMENTS_TABLE = """ CREATE TABLE %s_type_statements ( member text not NULL, klass text not NULL, context text not NULL, termComb tinyint unsigned not NULL)""" CREATE_LITERAL_STATEMENTS_TABLE = """ CREATE TABLE %s_literal_statements ( subject text not NULL, predicate text not NULL, object text, context text not NULL, termComb tinyint unsigned not NULL, objLanguage varchar(3), objDatatype text)""" CREATE_QUOTED_STATEMENTS_TABLE = """ CREATE TABLE %s_quoted_statements ( subject text not NULL, predicate text not NULL, object text, context text not NULL, termComb tinyint unsigned not NULL, objLanguage varchar(3), objDatatype text)""" CREATE_NS_BINDS_TABLE = """ CREATE TABLE %s_namespace_binds ( prefix varchar(20) UNIQUE not NULL, uri text, PRIMARY KEY (prefix))""" rdflib-2.4.2/rdflib/store/NodePickler.py0000644000175000017500000000165411153616032017076 0ustar nachonacho############## from cPickle import Pickler, Unpickler, UnpicklingError from cStringIO import StringIO class NodePickler(object): def __init__(self): self._objects = {} self._ids = {} self._get_object = self._objects.__getitem__ def _get_ids(self, key): try: 
def ParseConfigurationString(config_string):
    """
    Parse a configuration string of the form::

        key1=val1,key2=val2,key3=val3,...

    The following configuration keys are expected (not all are required):
    user, password, db, host, port (optional - defaults to 3306).

    Returns a dict of the parsed settings; ``port`` defaults to the integer
    3306 and ``password`` to '' when absent.  Raises AssertionError when a
    required key (user, db, host) is missing.
    """
    kvDict = {}
    for part in config_string.split(','):
        # BUGFIX: split only on the first '=' so values may themselves
        # contain '=' (the original kept only the text after the LAST '=').
        fields = part.split('=', 1)
        kvDict[fields[0]] = fields[-1]
    for requiredKey in ('user', 'db', 'host'):
        assert requiredKey in kvDict, "missing configuration key: %s" % requiredKey
    if 'port' not in kvDict:
        kvDict['port'] = 3306
    if 'password' not in kvDict:
        kvDict['password'] = ''
    return kvDict


def createTerm(termString, termType, store, objLanguage=None, objDatatype=None):
    """
    Rebuild an RDFLib term from its stored string form, memoizing the result
    in the store's per-kind caches (literalCache, bnodeCache, uriCache,
    otherCache).

    `termType` is a single-letter tag: 'L' literal, 'F' formula
    (QuotedGraph), 'B' BNode, 'U' URIRef, anything else is looked up in
    TERM_INSTANCIATION_DICT.
    """
    if termType == 'L':
        key = (termString, objLanguage, objDatatype)
        term = store.literalCache.get(key)
        if term is None:
            term = Literal(termString, objLanguage, objDatatype)
            store.literalCache[key] = term
        return term
    if termType == 'F':
        key = (termType, termString)
        term = store.otherCache.get(key)
        if term is None:
            term = QuotedGraph(store, URIRef(termString))
            store.otherCache[key] = term
        return term
    if termType == 'B':
        term = store.bnodeCache.get(termString)
        if term is None:
            term = TERM_INSTANCIATION_DICT[termType](termString)
            store.bnodeCache[termString] = term
        return term
    if termType == 'U':
        term = store.uriCache.get(termString)
        if term is None:
            term = URIRef(termString)
            store.uriCache[termString] = term
        return term
    key = (termType, termString)
    term = store.otherCache.get(key)
    if term is None:
        term = TERM_INSTANCIATION_DICT[termType](termString)
        store.otherCache[key] = term
    return term
s=createTerm(subject,sTerm,store) p=createTerm(predicate,pTerm,store) o=createTerm(obj,oTerm,store,objLanguage,objDatatype) graphKlass, idKlass = constructGraph(cTerm) return s,p,o,(graphKlass,idKlass,context) class _variable_cluster(object): """ A `_variable_cluster` instance represents the mapping from a single triple pattern to the appropriate SQL components necessary for forming a SQL query that incorporates the triple pattern. A triple pattern can cover more than one variable, and each instance of this class maintains information about all the variables that are present in the corresponding triple pattern. For each variable in a triple pattern, that variable is either newly visible in this triple pattern or was visible in a previous triple pattern (in a sequence of triple patterns). Also, each variable may correspond to either a Literal or a non-Literal. Managing the consequences of these requirements and the corresponding SQL artifacts are the most important aspects of this class. """ def __init__(self, component_name, db_prefix, subject, predicate, object_, context): ''' Initialize a `_variable_cluster` instance based upon the syntactic parts of the triple pattern and additional information linking the triple pattern to its query context. Parameters: - `component_name`: A name prefix used to construct the SQL phrases produced by this instance. This prefix must be unique to the set of `_variable_cluster` instances corresponding to a complete SPARQL query. These prefixes might be of the form "component_N" for some N, and are used to identify which SQL columns correspond to which SPARQL variable. - `db_prefix`: A name prefix used to fully qualify SQL table references in the current DB. - `subject`, `predicate`, `object_`, `context`: The subject, predicate, object, and the name of the graph for the current triple pattern, respectively. Each of these will be RDFLib `node` objects, and at least one should be an RDFLib `variable`. 
''' self.component_name = component_name self.db_prefix = db_prefix self.subject = subject self.subject_name = 'subject' self.predicate = predicate self.object_ = object_ self.object_name = 'object' self.context = context # If the predicate of this triple pattern is `rdf:type`, then the SQL # table uses 'member' for the subject and 'class' for the object. if RDF.type == self.predicate: self.subject_name = 'member' self.object_name = 'class' self.subset_name = None '''String indicating the base name of the table or view containing RDF statements that this triple pattern will reference.''' self.object_may_be_literal = False '''This will be True if and only if it is determined that the object of the represented triple pattern is a variable that could resolve to a literal.''' self.index = None '''The manager of this object must maintain a list of all `_variable_cluster` instances that contain variables. Once the manager processes this `_variable_cluster` in context, this variable contains the index of this object within that list.''' self.non_object_columns = [] '''SQL column phrases that are not relevant to the object of the triple pattern that this object represents.''' self.object_columns = [] '''SQL column phrases that are relevant to the object of the triple pattern that this object represents.''' self.join_fragment = None '''SQL phrase that defines the table to use for the triple pattern that this object represents in the "from" clause of the full SQL query.''' self.where_fragments = [] '''SQL condition phrases that will be conjunctively joined together in the "where" clause of the full SQL query.''' self.definitions = {} '''Map from variable name managed by this object to the full column reference that represents that variable.''' self.variable_columns = [] '''List of 4-tuples consisting of the names of the variables managed by this instance, the column reference that represents the variable, a flag that is True if and only if the variable corresponds to the 
object of the triple pattern, and a reference to this object.''' self.substitutions = [] '''List of all the static data that must be substituted into the query string by the query mechanism, in the proper order based upon placeholders in the query.''' def determine_initial_subset(self, store): ''' Determine the most specific RDF statement subset that this triple pattern can use, based on the types of the parts of the statement and the information provided about properties provided by the MySQL store (in the `store` parameter). This is crucial to optimization, because specific subsets (SQL tables such as the 'relations' table) are very efficient but general subsets (SQL views such as the 'URI_or_literal_objects' view) are very inefficient. :Parameters: - `store`: RDFLib MySQL store containing the target data. ''' if isinstance(self.subject, Literal): raise ValueError( 'A subject cannot be a literal.') if isinstance(self.predicate, URIRef): # This is a good case, performance-wise, as we can be much more # specific about the subset that we use when we have a non-variable # predicate. 
if RDF.type == self.predicate: # TODO: find a constant for these somewhere self.subset_name = "associativeBox" elif isinstance(self.object_, Literal): self.subset_name = "literalProperties" elif isinstance(self.object_, Variable): if self.predicate in store.literal_properties: self.subset_name = "literalProperties" elif self.predicate in store.resource_properties: self.subset_name = "relations" else: self.subset_name = "URI_or_literal_object" else: self.subset_name = "relations" elif isinstance(self.predicate, Variable): if isinstance(self.object_, Literal): self.subset_name = "literalProperties" elif not isinstance(self.object_, Variable): self.subset_name = "relation_or_associativeBox" else: self.subset_name = "all" else: raise ValueError( 'Each predicate must either a URIRef or a Variable.') # Once we know the initial subset to use, we can construct the join # fragment that this object will provide. self.join_fragment = (self.db_prefix + '_' + self.subset_name + ' as %s_statements' % (self.component_name,)) def note_object_is_named(self): ''' Note new information indicating that the object variable cannot be a literal. In a list of triple patterns, a later triple pattern may refer to the same variable as a previous triple pattern, and the later variable reference may further constrain the potential type of the initial instance of the variable (in particular, that the variable cannot be a literal), in which case the manager of the later variable reference should call this method on the manager of the initial variable reference. 
''' if self.object_may_be_literal: self.object_may_be_literal = False if 'URI_or_literal_object' == self.subset_name: self.subset_name = 'relations' self.join_fragment = (self.db_prefix + '_' + self.subset_name + ' as %s_statements' % (self.component_name,)) column_name = self.component_name + '_object' self.object_columns = ['%s_statements.%s as %s' % (self.component_name, self.object_name, column_name)] def get_SQL_clause(self): return ' ' + self.join_fragment def process_variable(self, variable, variable_bindings, role): ''' Set up the state for managing one variable for the current triple pattern. This method does not currently support object variables, as they present a sufficiently different case that they should be handled manually. :Parameters: - `variable`: The variable to manage. - `variable_bindings`: A map of previously existing variables. - `role`: String indicating the role that this variable plays in the managed triple pattern. This should be one of 'subject', 'predicate', or 'context'. Returns a list containing the new variable name and manager tuple, or an empty list if this is a previously seen variable. This is a private method. ''' if 'object' == role: raise ValueError( '`process_variable` cannot current handle object variables. ' + 'Please deal with them manually.') variable_name = str(variable) if 'subject' == role: statements_column = self.subject_name elif 'object' == role: statements_column = self.object_name else: statements_column = role if variable_name in variable_bindings: # Since the variable name was already seen, link the current occurance # of the variable to the initial occurance using a predicate in the # SQL 'where' phrase. 
initial_reference = variable_bindings[variable_name] self.where_fragments.append('%s_statements.%s = %s' % ( self.component_name, statements_column, initial_reference.definitions[variable_name])) # Also, if the initial occurance of the variable was in the object # role and this occurance is not in the object role, then the variable # cannot refer to a literal, so communicate this to the manager of the # initial occurance. if initial_reference.definitions[variable_name].split( '.')[1] == 'object' and 'object' != role: initial_reference.note_object_is_named() return [] else: # Note that this is the first occurance of the variable; this includes # adding appropriate SQL phrases that bind to this variable. defining_reference = self.component_name + "_statements.%s" % ( statements_column,) self.definitions[variable_name] = defining_reference column_name = self.component_name + '_' + role self.non_object_columns.append('%s as %s' % ( defining_reference, column_name)) self.non_object_columns.append('%s_term as %s_term' % ( defining_reference, column_name)) self.variable_columns.append((variable_name, column_name, False, self)) return [(variable_name, self)] def make_SQL_components(self, variable_bindings, variable_clusters): ''' Process all the terms from the managed RDF triple pattern in the appropriate context. :Parameters: - `variable_bindings`: Map of existing variable bindings. It is crucial that the caller updates this map after each triple pattern is processed. - `variable_clusters`: List of existing `_variable_cluster` objects that manage variables. Returns a list of 2-tuples consisting of newly managed variables and a reference to this object (which may be empty if there are no new variables in this triple pattern). 
''' if self.index is not None: raise ValueError('`make_SQL_components` should only be run once per ' + '`_variable_cluster` instance.') self.index = len(variable_clusters) local_binding_list = [] # First, process subject, predicate, and context from the managed triple # pattern, as they are all similar cases in that they cannot be # literals. if isinstance(self.subject, Variable): local_binding_list.extend( self.process_variable(self.subject, variable_bindings, 'subject')) elif not isinstance(self.subject, Literal): self.where_fragments.append('%s_statements.%s = %%s' % (self.component_name, self.subject_name,)) self.substitutions.append(normalizeNode(self.subject)) else: raise ValueError('The subject of a triple pattern cannot be a literal.') if isinstance(self.predicate, Variable): local_binding_list.extend( self.process_variable(self.predicate, variable_bindings, 'predicate')) elif RDF.type != self.predicate: self.where_fragments.append('%s_statements.predicate = %%s' % (self.component_name,)) self.substitutions.append(normalizeNode(self.predicate)) if isinstance(self.context, Variable): local_binding_list.extend( self.process_variable(self.context, variable_bindings, 'context')) elif isinstance(self.context, URIRef) or isinstance(self.context, BNode): self.where_fragments.append('%s_statements.context = %%s' % (self.component_name,)) self.substitutions.append(normalizeNode(self.context)) # Process the object of the triple pattern manually, as it could be a # literal and so requires special handling to query properly. 
if isinstance(self.object_, Variable): variable_name = str(self.object_) if variable_name in variable_bindings: initial_reference = variable_bindings[variable_name] self.where_fragments.append('%s_statements.%s = %s' % ( self.component_name, self.object_name, initial_reference.definitions[variable_name])) if 'URI_or_literal_object' == self.subset_name: self.subset_name = 'relations' self.join_fragment = ( self.db_prefix + '_' + self.subset_name + ' as %s_statements' % (self.component_name,)) else: defining_reference = self.component_name + "_statements.%s" % ( self.object_name,) self.definitions[variable_name] = defining_reference column_name = self.component_name + '_object' if 'URI_or_literal_object' == self.subset_name: self.object_may_be_literal = True if 'literalProperties' != self.subset_name: self.non_object_columns.append('%s_statements.%s_term as %s_term' % (self.component_name, self.object_name, column_name)) else: self.non_object_columns.append("'L' as %s_term" % (column_name,)) self.object_columns.append('%s as %s' % (defining_reference, column_name)) if not('relations' == self.subset_name or 'associativeBox' == self.subset_name): self.object_columns.append('%s_statements.data_type as %s_datatype' % (self.component_name, column_name)) self.object_columns.append('%s_statements.language as %s_language' % (self.component_name, column_name)) self.variable_columns.append((variable_name, column_name, True, self)) local_binding_list.append((variable_name, self)) else: self.where_fragments.append('%s_statements.%s = %%s' % (self.component_name, self.object_name,)) self.substitutions.append(normalizeNode(self.object_)) return local_binding_list class MySQL(Store): """ MySQL implementation of FOPL Relational Model as an rdflib Store """ context_aware = True formula_aware = True transaction_aware = True regex_matching = NATIVE_REGEX batch_unification = True def __init__(self, identifier=None, configuration=None,debug=False): self.debug = debug self.identifier = 
identifier and identifier or 'hardcoded' #Use only the first 10 bytes of the digest self._internedId = INTERNED_PREFIX + sha.new(self.identifier).hexdigest()[:10] #Setup FOPL RelationalModel objects self.idHash = IdentifierHash(self._internedId) self.valueHash = LiteralHash(self._internedId) self.binaryRelations = NamedBinaryRelations(self._internedId,self.idHash,self.valueHash) self.literalProperties = NamedLiteralProperties(self._internedId,self.idHash,self.valueHash) self.aboxAssertions = AssociativeBox(self._internedId,self.idHash,self.valueHash) self.tables = [ self.binaryRelations, self.literalProperties, self.aboxAssertions, self.idHash, self.valueHash ] self.createTables = [ self.idHash, self.valueHash, self.binaryRelations, self.literalProperties, self.aboxAssertions ] self.hashes = [self.idHash,self.valueHash] self.partitions = [self.literalProperties,self.binaryRelations,self.aboxAssertions,] #This is a dictionary which caputures the relationships between #the each view, it's prefix, the arguments to viewUnionSelectExpression #and the tables involved self.viewCreationDict={ '_all' : (False,self.partitions), '_URI_or_literal_object' : (False,[self.literalProperties, self.binaryRelations]), '_relation_or_associativeBox': (True,[self.binaryRelations, self.aboxAssertions]), '_all_objects' : (False,self.hashes) } #This parameter controls how exlusively the literal table is searched #If true, the Literal partition is searched *exclusively* if the object term #in a triple pattern is a Literal or a REGEXTerm. Note, the latter case #prevents the matching of URIRef nodes as the objects of a triple in the store. 
#If the object term is a wildcard (None) #Then the Literal paritition is searched in addition to the others #If this parameter is false, the literal partition is searched regardless of what the object #of the triple pattern is self.STRONGLY_TYPED_TERMS = False self._db = None self.configuration = None if configuration is not None: self.open(configuration) self.cacheHits = 0 self.cacheMisses = 0 self.literalCache = {} self.uriCache = {} self.bnodeCache = {} self.otherCache = {} self.literal_properties = set() '''set of URIRefs of those RDF properties which are known to range over literals.''' self.resource_properties = set() '''set of URIRefs of those RDF properties which are known to range over resources.''' def executeSQL(self,cursor,qStr,params=None,paramList=False): """ Overridded in order to pass params seperate from query for MySQLdb to optimize """ #self._db.autocommit(False) if params is None: cursor.execute(qStr) elif paramList: cursor.executemany(qStr,[tuple(item) for item in params]) else: cursor.execute(qStr,tuple(params)) def _dbState(self,db,configDict): c=db.cursor() c.execute("""SHOW DATABASES""") #FIXME This is a character set hack. 
See: http://sourceforge.net/forum/forum.php?thread_id=1448424&forum_id=70461 #self._db.charset = 'utf8' rt = c.fetchall() if (configDict['db'].encode('utf-8'),) in rt: for tn in self.tables: c.execute("""show tables like '%s'"""%(tn,)) rt=c.fetchall() if not rt: sys.stderr.write("table %s Doesn't exist\n" % (tn)); #The database exists, but one of the partitions doesn't exist return CORRUPTED_STORE #Everything is there (the database and the partitions) return VALID_STORE #The database doesn't exist - nothing is there return NO_STORE def _createViews(self,cursor): """ Helper function for creating views """ for suffix,(relations_only,tables) in self.viewCreationDict.items(): query='create view %s%s as %s'%(self._internedId, suffix, ' union all '.join([t.viewUnionSelectExpression(relations_only) for t in tables])) if self.debug: print >> sys.stderr, "## Creating View ##\n",query cursor.execute(query) #Database Management Methods def open(self, configuration, create=False): """ Opens the store specified by the configuration string. If create is True a store will be created if it does not already exist. If create is False and a store does not already exist an exception is raised. An exception is also raised if a store exists, but there is insufficient permissions to open the store. 
""" self.configuration = configuration configDict = ParseConfigurationString(configuration) if create: test_db = MySQLdb.connect(user=configDict['user'], passwd=configDict['password'], db='test', port=configDict['port'], host=configDict['host'], #use_unicode=True, #read_default_file='/etc/my-client.cnf' ) c=test_db.cursor() c.execute("""SET AUTOCOMMIT=0""") c.execute("""SHOW DATABASES""") if not (configDict['db'].encode('utf-8'),) in c.fetchall(): print >> sys.stderr, "creating %s (doesn't exist)"%(configDict['db']) c.execute("""CREATE DATABASE %s"""%(configDict['db'],)) test_db.commit() c.close() test_db.close() db = MySQLdb.connect(user = configDict['user'], passwd = configDict['password'], db=configDict['db'], port=configDict['port'], host=configDict['host'], #use_unicode=True, #read_default_file='/etc/my-client.cnf' ) c=db.cursor() c.execute("""SET AUTOCOMMIT=0""") c.execute(CREATE_NS_BINDS_TABLE%(self._internedId)) for kb in self.createTables: c.execute(kb.createSQL()) if isinstance(kb,RelationalHash) and kb.defaultSQL(): c.execute(kb.defaultSQL()) self._createViews(c) db.commit() c.close() db.close() else: #This branch is needed for backward compatibility #which didn't use SQL views _db=MySQLdb.connect(user = configDict['user'], passwd = configDict['password'], db=configDict['db'], port=configDict['port'], host=configDict['host']) if self._dbState(_db,configDict) == VALID_STORE: c=_db.cursor() c.execute("""SET AUTOCOMMIT=0""") existingViews=[] #check which views already exist views=[] for suffix in self.viewCreationDict: view = self._internedId+suffix views.append(view) c.execute("""show tables like '%s'"""%(view,)) rt=c.fetchall() if rt: existingViews.append(view) c.close() _db.close() if not existingViews: #None of the views have been defined - so this is #an old (but valid) store #we need to create the missing views db = MySQLdb.connect(user = configDict['user'], passwd = configDict['password'], db=configDict['db'], port=configDict['port'], 
host=configDict['host']) c=db.cursor() c.execute("""SET AUTOCOMMIT=0""") self._createViews(c) db.commit() c.close() elif len(existingViews)!=len(views): #Not all the view have been setup return CORRUPTED_STORE try: port = int(configDict['port']) except: raise ArithmeticError('MySQL port must be a valid integer') self._db = MySQLdb.connect(user = configDict['user'], passwd = configDict['password'], db=configDict['db'], port=port, host=configDict['host'], #use_unicode=True, #read_default_file='/etc/my.cnf' ) self._db.autocommit(False) return self._dbState(self._db,configDict) def destroy(self, configuration): """ FIXME: Add documentation """ configDict = ParseConfigurationString(configuration) msql_db = MySQLdb.connect(user=configDict['user'], passwd=configDict['password'], db=configDict['db'], port=configDict['port'], host=configDict['host'] ) msql_db.autocommit(False) c=msql_db.cursor() for tbl in self.tables + ["%s_namespace_binds"%self._internedId]: try: c.execute('DROP table %s'%tbl) #print "dropped table: %s"%(tblsuffix%(self._internedId)) except Exception, e: print >> sys.stderr, "unable to drop table: %s"%(tbl) print >> sys.stderr, e for suffix in self.viewCreationDict: view = self._internedId+suffix try: c.execute('DROP view %s'%view) except Exception, e: print >> sys.stderr, "unable to drop table: %s"%(view) print >> sys.stderr, e #Note, this only removes the associated tables for the closed world universe given by the identifier print >> sys.stderr, "Destroyed Close World Universe %s ( in MySQL database %s)"%(self.identifier,configDict['db']) msql_db.commit() msql_db.close() def batch_unify(self, patterns): """ Perform RDF triple store-level unification of a list of triple patterns (4-item tuples which correspond to a SPARQL triple pattern with an additional constraint for the graph name). For the MySQL backend, this method compiles the list of triple patterns into SQL statements that obtain bindings for all the variables in the list of triples patterns. 
:Parameters: - `patterns`: a list of 4-item tuples where any of the items can be one of: Variable, URIRef, BNode, or Literal. Returns a generator over dictionaries of solutions to the list of triple patterns. Each dictionary binds the variables in the triple patterns to the correct values for those variables. For more on unification see: http://en.wikipedia.org/wiki/Unification """ variable_bindings = {} variable_clusters = [] # Unpack each triple pattern, and for each pattern, create a # variable cluster for managing the variables in that triple # pattern. index = 0 for subject, predicate, object_, context in patterns: component_name = "component_" + str(index) index = index + 1 cluster = _variable_cluster( component_name, self._internedId, subject, predicate, object_, context) cluster.determine_initial_subset(self) bindings = cluster.make_SQL_components( variable_bindings, variable_clusters) variable_bindings.update(bindings) variable_clusters.append(cluster) from_fragments = [] where_fragments = [] columns = [] substitutions = [] variable_columns = [] # Consolidate the various SQL fragments from each variable cluster. for cluster in variable_clusters: from_fragments.append(cluster.get_SQL_clause()) where_fragments.extend(cluster.where_fragments) columns.extend(cluster.non_object_columns) columns.extend(cluster.object_columns) substitutions.extend(cluster.substitutions) variable_columns.extend(cluster.variable_columns) if len(variable_columns) < 1: return # Construct and execute the SQL query. 
columns_fragment = ', '.join(columns) from_fragment = ',\n '.join(from_fragments) where_fragment = ' and '.join(where_fragments) if len(where_fragment) > 0: where_fragment = '\nwhere\n' + where_fragment query = "select straight_join\n%s\nfrom\n%s%s\n" % ( columns_fragment, from_fragment, where_fragment) if self.debug: print >> sys.stderr, query, substitutions cursor = self._db.cursor() cursor.execute(query, substitutions) preparation_cursor = self._db.cursor() def prepare_row(row): ''' Convert a single row from the results of the big SPARQL solution query to a map from query variables to lexical values. :Parameters: - `row`: The return value of `fetchone()` on an MySQLdb cursor object after executing the SPARQL solving SQL query. Returns a dictionary from SPARQL variable names to one set of correct values for the original list of SPARQL triple patterns. ''' # First, turn the list into a map from column names to values. row_map = dict(zip( [description[0] for description in cursor.description], row)) # As the values are all integers, we must execute another SQL # query to map the integers to their lexical values. This query # is straightforward to build, so we can do it here instead of in # using helper objects. 
prefix = self._internedId columns = [] from_fragments = [] where_fragments = [] substitutions = [] for varname, column_name, is_object, cluster in variable_columns: component_name = "component_" + str(len(from_fragments)) columns.append(component_name + ".lexical as " + column_name) where_fragments.append(component_name + '.id = %s') substitutions.append(row_map[column_name]) term = row_map[column_name + '_term'] if 'L' == term: from_fragments.append('%s_literals as %s' % (prefix, component_name)) datatype = row_map[column_name + '_datatype'] if datatype: from_fragments.append('%s_identifiers as %s_datatype' % (prefix, component_name)) columns.append('%s_datatype.lexical as %s_datatype' % (component_name, column_name)) where_fragments.append(component_name + '_datatype.id = %s') substitutions.append(datatype) else: from_fragments.append('%s_identifiers as %s' % (prefix, component_name)) query = ('select\n%s\nfrom\n%s\nwhere\n%s\n' % (', '.join(columns), ',\n'.join(from_fragments), ' and '.join(where_fragments))) if self.debug: print >> sys.stderr, query, substitutions preparation_cursor.execute(query, substitutions) prepared_map = dict(zip( [description[0] for description in preparation_cursor.description], preparation_cursor.fetchone())) # Unwrap the elements of `variable_columns`, which provide the # original SPARQL variable names and the corresponding SQL column # names and management information. Then map these SPARQL # variable names to the correct RDFLib node objects, using the # lexical information obtained using the query above. 
new_row = {} for varname, column_name, is_object, cluster in variable_columns: aVariable = Variable(varname) lexical = prepared_map[column_name] term = row_map[column_name + '_term'] if 'L' == term: datatype = prepared_map.get(column_name + '_datatype', None) if datatype: datatype = URIRef(datatype) language = row_map[column_name + '_language'] node = Literal(lexical, datatype=datatype, lang=language) elif 'B' == term: node = BNode(lexical) elif 'U' == term: node = URIRef(lexical) else: raise ValueError('Unknown term type ' + term) new_row[aVariable] = node return new_row # Grab a row from the big solving query, process it, and yield the # result, until there are no more results. row = cursor.fetchone() while row: new_row = prepare_row(row) yield new_row row = cursor.fetchone() return #Transactional interfaces def commit(self): """ """ self._db.commit() def rollback(self): """ """ self._db.rollback() def gc(self): """ Purges unreferenced identifiers / values - expensive """ c=self._db.cursor() purgeQueries = GarbageCollectionQUERY( self.idHash, self.valueHash, self.binaryRelations, self.aboxAssertions, self.literalProperties) for q in purgeQueries: self.executeSQL(c,q) def add(self, (subject, predicate, obj), context=None, quoted=False): """ Add a triple to the store of triples. """ qSlots = genQuadSlots([subject,predicate,obj,context]) if predicate == RDF.type: kb = self.aboxAssertions elif isinstance(obj,Literal): kb = self.literalProperties else: kb = self.binaryRelations kb.insertRelations([qSlots]) kb.flushInsertions(self._db) def addN(self, quads): """ Adds each item in the list of statements to a specific context. The quoted argument is interpreted by formula-aware stores to indicate this statement is quoted/hypothetical. 
Note that the default implementation is a redirect to add """ for s,p,o,c in quads: assert c is not None, "Context associated with %s %s %s is None!"%(s,p,o) qSlots = genQuadSlots([s,p,o,c]) if p == RDF.type: kb = self.aboxAssertions elif isinstance(o,Literal): kb = self.literalProperties else: kb = self.binaryRelations kb.insertRelations([qSlots]) for kb in self.partitions: if kb.pendingInsertions: kb.flushInsertions(self._db) def remove(self, (subject, predicate, obj), context): """ Remove a triple from the store """ targetBRPs = BinaryRelationPartitionCoverage((subject,predicate,obj,context),self.partitions) c=self._db.cursor() for brp in targetBRPs: query = "DELETE %s from %s %s WHERE "%( brp, brp, brp.generateHashIntersections() ) whereClause,whereParameters = brp.generateWhereClause((subject,predicate,obj,context)) self.executeSQL(c,query+whereClause,params=whereParameters) c.close() def triples(self, (subject, predicate, obj), context=None): c=self._db.cursor() if context is None or isinstance(context.identifier,REGEXTerm): rt=PatternResolution((subject,predicate,obj,context),c,self.partitions,fetchall=False) else: #No need to order by triple (expensive), all result sets will be in the same context rt=PatternResolution((subject,predicate,obj,context),c,self.partitions,orderByTriple=False,fetchall=False) while rt: s,p,o,(graphKlass,idKlass,graphId) = extractTriple(rt,self,context) if context is None or isinstance(context.identifier,REGEXTerm): currentContext = graphKlass(self,idKlass(graphId)) else: currentContext = context contexts = [currentContext] rt = next = c.fetchone() if context is None or isinstance(context.identifier,REGEXTerm): sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) while sameTriple: s2,p2,o2,(graphKlass,idKlass,graphId) = extractTriple(next,self,context) c2 = graphKlass(self,idKlass(graphId)) contexts.append(c2) rt = next = c.fetchone() sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) yield 
(s,p,o),(con for con in contexts) c.close() def triples_choices(self, (subject, predicate, object_),context=None): """ A variant of triples that can take a list of terms instead of a single term in any slot. Stores can implement this to optimize the response time from the import default 'fallback' implementation, which will iterate over each term in the list and dispatch to tripless """ if isinstance(object_,list): assert not isinstance(subject,list), "object_ / subject are both lists" assert not isinstance(predicate,list), "object_ / predicate are both lists" if not object_: object_ = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg elif isinstance(subject,list): assert not isinstance(predicate,list), "subject / predicate are both lists" if not subject: subject = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg elif isinstance(predicate,list): assert not isinstance(subject,list), "predicate / subject are both lists" if not predicate: predicate = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg def __repr__(self): c=self._db.cursor() rtDict = {} countRows = "select count(*) from %s" countContexts = "select DISTINCT %s from %s" unionSelect = ' union '.join([countContexts%(part.columnNames[CONTEXT],str(part)) for part in self.partitions]) self.executeSQL(c,unionSelect) ctxCount = len(c.fetchall()) for part in self.partitions: self.executeSQL(c,countRows%part) rowCount = c.fetchone()[0] rtDict[str(part)]=rowCount return ""%( ctxCount, rtDict[str(self.aboxAssertions)], rtDict[str(self.literalProperties)], rtDict[str(self.binaryRelations)], ) def __len__(self, context=None): rows = [] countRows = "select count(*) from %s" c=self._db.cursor() for part in self.partitions: if context is not None: whereClause,whereParams = part.generateWhereClause((None,None,None,context.identifier)) 
self.executeSQL(c,countRows%part + " where " + whereClause,whereParams) else: self.executeSQL(c,countRows%part) rowCount = c.fetchone()[0] rows.append(rowCount) return reduce(lambda x,y: x+y,rows) def contexts(self, triple=None): c=self._db.cursor() if triple: subject,predicate,obj = triple else: subject = predicate = obj = None rt=PatternResolution((subject,predicate,obj,None), c, self.partitions, fetchall=False, fetchContexts=True) while rt: contextId,cTerm = rt graphKlass, idKlass = constructGraph(cTerm) yield graphKlass(self,idKlass(contextId)) rt = c.fetchone() #Namespace persistence interface implementation def bind(self, prefix, namespace): """ """ c=self._db.cursor() try: self.executeSQL( c, "INSERT INTO %s_namespace_binds VALUES ('%s', '%s')"%( self._internedId, prefix, namespace) ) except: pass c.close() def prefix(self, namespace): """ """ c=self._db.cursor() self.executeSQL(c,"select prefix from %s_namespace_binds where uri = '%s'"%( self._internedId, namespace) ) rt = [rtTuple[0] for rtTuple in c.fetchall()] c.close() return rt and rt[0] or None def namespace(self, prefix): """ """ c=self._db.cursor() try: self.executeSQL(c,"select uri from %s_namespace_binds where prefix = '%s'"%( self._internedId, prefix) ) except: return None rt = [rtTuple[0] for rtTuple in c.fetchall()] c.close() return rt and rt[0] or None def namespaces(self): """ """ c=self._db.cursor() self.executeSQL(c,"select prefix, uri from %s_namespace_binds where 1;"%( self._internedId ) ) rt=c.fetchall() c.close() for prefix,uri in rt: yield prefix,uri CREATE_NS_BINDS_TABLE = """ CREATE TABLE %s_namespace_binds ( prefix varchar(20) UNIQUE not NULL, uri text, PRIMARY KEY (prefix), INDEX uri_index (uri(100))) ENGINE=InnoDB""" rdflib-2.4.2/rdflib/store/Redland.py0000644000175000017500000000700311153616032016242 0ustar nachonacho import rdflib from rdflib.Graph import Graph from rdflib.URIRef import URIRef from rdflib.Node import Node from rdflib.BNode import BNode from rdflib.Literal import 
Literal try: import RDF except ImportError: import warnings warnings.warn("Redlands not installed") __test__=False from rdflib.store import Store def _t(i): if isinstance(i, rdflib.URIRef): return RDF.Node(RDF.Uri(unicode(i))) if isinstance(i, rdflib.BNode): return RDF.Node(blank=str(i)) if isinstance(i, rdflib.Literal): return RDF.Node(literal=str(i)) if isinstance(i, Graph): return _t(i.identifier) if i is None: return None raise TypeError, 'Cannot convert %s' % `i` def _c(i): return _t(i) def _f(i): if isinstance(i, RDF.Uri): return rdflib.URIRef(i) if isinstance(i, RDF.Node): if i.is_blank(): return rdflib.BNode(i.blank_identifier) elif i.is_literal(): return rdflib.Literal(i) else: return URIRef(i.uri) if i is None: return None raise TypeError, 'Cannot convert %s' % `i` class Redland(Store): context_aware = True def __init__(self, model=None): super(Redland, self).__init__() if model is None: model = RDF.Model(RDF.MemoryStorage(options_string="contexts='yes'")) self.model = model def __len__(self, context=None): """ Return number of triples (statements in librdf). """ count = 0 for triple, cg in self.triples((None, None, None), context): count += 1 return count def add(self, (subject, predicate, object), context=None, quoted=False): """\ Add a triple to the store of triples. 
""" if context is not None: self.model.append(RDF.Statement(_t(subject), _t(predicate), _t(object)), _c(context)) else: self.model.append(RDF.Statement(_t(subject), _t(predicate), _t(object))) def remove(self, (subject, predicate, object), context, quoted=False): if context is None: contexts = self.contexts() else: contexts = [context] for context in contexts: if subject is None and predicate is None and object is None: self.model.remove_statements_with_context(_c(context)) else: del self.model[RDF.Statement(_t(subject), _t(predicate), _t(object)), _c(context)] def triples(self, (subject, predicate, object), context=None): """A generator over all the triples matching """ cgraph = RDF.Model() triple = RDF.Statement(_t(subject), _t(predicate), _t(object)) for statement, c in self.model.find_statements_context(triple): if context is None or _f(c) == context.identifier: cgraph.append(statement) for statement in cgraph.find_statements(triple): ret = [] for c in self.model.get_contexts(): if self.model.contains_statement_context(statement, _c(context)): ret.append(c) yield (_f(statement.subject), _f(statement.predicate), _f(statement.object)), iter(ret) def contexts(self, triple=None): # TODO: have Graph support triple? for context in self.model.get_contexts(): yield Graph(self, _f(context)) def bind(self, prefix, namespace): pass def namespace(self, prefix): pass def prefix(self, namespace): pass def namespaces(self): pass rdflib-2.4.2/rdflib/store/ZODB.py0000644000175000017500000000133511153616032015431 0ustar nachonacho# Author: Michel Pelletier Any = None from rdflib.store.IOMemory import IOMemory # you must export your PYTHONPATH to point to a Z2.8 or Z3+ installation to get this to work!, like: #export PYTHONPATH="/home/michel/dev/Zope3Trunk/src" try: # Zope 3 from persistent import Persistent except ImportError: # < Zope 2.8? 
# Author: Michel Pelletier

Any = None

from rdflib.store.IOMemory import IOMemory

# you must export your PYTHONPATH to point to a Z2.8 or Z3+ installation to
# get this to work!, like:
# export PYTHONPATH="/home/michel/dev/Zope3Trunk/src"
try:
    # Zope 3
    from persistent import Persistent
except ImportError:
    # < Zope 2.8?
    from Persistence import Persistent

from BTrees.IOBTree import IOBTree
from BTrees.OIBTree import OIBTree
from BTrees.OOBTree import OOBTree


class ZODB(Persistent, IOMemory):
    """An IOMemory store whose internal maps are persistent ZODB BTrees."""

    def createForward(self):
        # int -> object map for the forward index
        return IOBTree()

    def createReverse(self):
        # object -> int map for the reverse index
        return OIBTree()

    def createIndex(self):
        return IOBTree()

    def createPrefixMap(self):
        # object -> object map for namespace/prefix bindings
        return OOBTree()
""" from rdflib.store import Store from rdflib.Graph import Graph, ConjunctiveGraph from pprint import pprint import threading destructiveOpLocks = { 'add':None, 'remove':None, } class AuditableStorage(Store): def __init__(self, storage): self.storage = storage self.context_aware = storage.context_aware #NOTE: this store can't be formula_aware as it doesn't have enough info to reverse #The removal of a quoted statement self.formula_aware = False#storage.formula_aware self.transaction_aware = True #This is only half true self.reverseOps = [] self.rollbackLock = threading.RLock() def open(self, configuration, create=True): return self.storage.open(configuration,create) def close(self, commit_pending_transaction=False): self.storage.close() def destroy(self, configuration): self.storage.destroy(configuration) def add(self, (subject, predicate, object_), context, quoted=False): lock = destructiveOpLocks['add'] lock = lock and lock or threading.RLock() lock.acquire() context = context is not None and context.__class__(self.storage,context.identifier) or None ctxId = context is not None and context.identifier or None self.reverseOps.append((subject,predicate,object_,ctxId,'remove')) if (subject,predicate,object_,ctxId,'add') in self.reverseOps: self.reverseOps.remove((subject,predicate,object_,context,'add')) self.storage.add((subject, predicate, object_), context, quoted) lock.release() def remove(self, (subject, predicate, object_), context=None): lock = destructiveOpLocks['remove'] lock = lock and lock or threading.RLock() lock.acquire() #Need to determine which quads will be removed if any term is a wildcard context = context is not None and context.__class__(self.storage,context.identifier) or None ctxId = context is not None and context.identifier or None if None in [subject,predicate,object_,context]: if ctxId: for s,p,o in context.triples((subject,predicate,object_)): if (s,p,o,ctxId,'remove') in self.reverseOps: self.reverseOps.remove((s,p,o,ctxId,'remove')) 
else: self.reverseOps.append((s,p,o,ctxId,'add')) else: for s,p,o,ctx in ConjunctiveGraph(self.storage).quads((subject,predicate,object_)): if (s,p,o,ctx.identifier,'remove') in self.reverseOps: self.reverseOps.remove((s,p,o,ctx.identifier,'remove')) else: self.reverseOps.append((s,p,o,ctx.identifier,'add')) elif (subject,predicate,object_,ctxId,'add') in self.reverseOps: self.reverseOps.remove((subject,predicate,object_,ctxId,'add')) else: self.reverseOps.append((subject,predicate,object_,ctxId,'add')) self.storage.remove((subject,predicate,object_),context) lock.release() def triples(self, (subject, predicate, object_), context=None): context = context is not None and context.__class__(self.storage,context.identifier) or None for (s,p,o),cg in self.storage.triples((subject, predicate, object_), context): yield (s,p,o),cg def __len__(self, context=None): context = context is not None and context.__class__(self.storage,context.identifier) or None return self.storage.__len__(context) def contexts(self, triple=None): for ctx in self.storage.contexts(triple): yield ctx def bind(self, prefix, namespace): self.storage.bind(prefix, namespace) def prefix(self, namespace): return self.storage.prefix(namespace) def namespace(self, prefix): return self.storage.namespace(prefix) def namespaces(self): return self.storage.namespaces() def commit(self): self.storage.commit() self.reverseOps = [] def rollback(self): #Aquire Rollback lock and apply reverse operations in the forward order self.rollbackLock.acquire() for subject,predicate,obj,context,op in self.reverseOps: if op == 'add': self.storage.add((subject,predicate,obj),Graph(self.storage,context)) else: self.storage.remove((subject,predicate,obj),Graph(self.storage,context)) self.reverseOps = [] self.rollbackLock.release() rdflib-2.4.2/rdflib/store/REGEXMatching.py0000644000175000017500000001323511153616033017223 0ustar nachonacho""" This wrapper intercepts calls through the store interface which make use of The REGEXTerm 
"""
This wrapper intercepts calls through the store interface which make use of
the REGEXTerm class to represent matches by REGEX instead of literal
comparison.

Implemented for stores that don't support this, and essentially provides
the support by replacing the REGEXTerms by wildcards (None) and matching
against the results from the store it's wrapping.
"""
from rdflib.store import Store
from pprint import pprint
from rdflib.Graph import Graph, QuotedGraph, ConjunctiveGraph, BackwardCompatGraph
import re

# Store is capable of doing its own REGEX matching
NATIVE_REGEX = 0
# Store uses python's re module internally for REGEX matching (SQLite for instance)
PYTHON_REGEX = 1

# REGEXTerm can be used in any term slot and is interpreted as a request to
# perform a REGEX match (not a string comparison) using the (pre-compiled)
# value.
class REGEXTerm(unicode):
    def __init__(self,expr):
        # compile once; reused for every comparison below
        self.compiledExpr = re.compile(expr)

    def __reduce__(self):
        # NOTE(review): pickling restores an *empty* pattern, discarding the
        # original expression -- confirm this is intentional before relying
        # on round-tripping REGEXTerm instances.
        return (REGEXTerm, (unicode(''),))

def regexCompareQuad(quad,regexQuad):
    # True iff every REGEXTerm slot of regexQuad matches the corresponding
    # slot of quad; non-REGEX slots are ignored (assumed filtered upstream).
    for index in range(4):
        if isinstance(regexQuad[index],REGEXTerm) and not regexQuad[index].compiledExpr.match(quad[index]):
            return False
    return True

class REGEXMatching(Store):
    def __init__(self, storage):
        self.storage = storage
        # capability flags are simply delegated to the wrapped store
        self.context_aware = storage.context_aware
        self.formula_aware = storage.formula_aware
        self.transaction_aware = storage.transaction_aware

    def open(self, configuration, create=True):
        return self.storage.open(configuration,create)

    def close(self, commit_pending_transaction=False):
        self.storage.close()

    def destroy(self, configuration):
        self.storage.destroy(configuration)

    def add(self, (subject, predicate, object_), context, quoted=False):
        self.storage.add((subject, predicate, object_), context, quoted)

    def remove(self, (subject, predicate, object_), context=None):
        if isinstance(subject,REGEXTerm) or \
           isinstance(predicate,REGEXTerm) or \
           isinstance(object_,REGEXTerm) or \
           (context is not None and isinstance(context.identifier,REGEXTerm)):
            #One or more of the terms is a REGEX expression, so we must
            #replace it / them with wildcard(s) and match after we query
            s = not isinstance(subject,REGEXTerm) and subject or None
            p = not isinstance(predicate,REGEXTerm) and predicate or None
            o = not isinstance(object_,REGEXTerm) and object_ or None
            c = (context is not None and not isinstance(context.identifier,REGEXTerm)) and context or None
            removeQuadList = []
            for (s1,p1,o1),cg in self.storage.triples((s,p,o),c):
                for ctx in cg:
                    ctx = ctx.identifier
                    if regexCompareQuad((s1,p1,o1,ctx),(subject,predicate,object_,context is not None and context.identifier or context)):
                        removeQuadList.append((s1,p1,o1,ctx))
            # collect first, then remove -- avoids mutating while iterating
            for s,p,o,c in removeQuadList:
                self.storage.remove((s,p,o),c and Graph(self,c) or c)
        else:
            self.storage.remove((subject,predicate,object_),context)

    def triples(self, (subject, predicate, object_), context=None):
        if isinstance(subject,REGEXTerm) or \
           isinstance(predicate,REGEXTerm) or \
           isinstance(object_,REGEXTerm) or \
           (context is not None and isinstance(context.identifier,REGEXTerm)):
            #One or more of the terms is a REGEX expression, so we must
            #replace it / them with wildcard(s) and match after we query
            s = not isinstance(subject,REGEXTerm) and subject or None
            p = not isinstance(predicate,REGEXTerm) and predicate or None
            o = not isinstance(object_,REGEXTerm) and object_ or None
            c = (context is not None and not isinstance(context.identifier,REGEXTerm)) and context or None
            for (s1,p1,o1),cg in self.storage.triples((s,p,o),c):
                matchingCtxs = []
                for ctx in cg:
                    if c is None:
                        # context was absent or a REGEX: filter contexts here
                        if context is None or context.identifier.compiledExpr.match(ctx.identifier):
                            matchingCtxs.append(ctx)
                    else:
                        matchingCtxs.append(ctx)
                if matchingCtxs and regexCompareQuad((s1,p1,o1,None),(subject,predicate,object_,None)):
                    yield (s1,p1,o1),(c for c in matchingCtxs)
        else:
            for (s1,p1,o1),cg in self.storage.triples((subject, predicate, object_), context):
                yield (s1,p1,o1),cg

    def __len__(self, context=None):
        #NOTE: If the context is a REGEX this could be an expensive proposition
        return self.storage.__len__(context)

    def contexts(self, triple=None):
        #NOTE: There is no way to control REGEX matching for this method at
        #this level (as it only returns the contexts, not the matching triples)
        for ctx in self.storage.contexts(triple):
            yield ctx

    def remove_context(self, identifier):
        # NOTE(review): passes the bare identifier where other paths pass a
        # Graph as the context -- confirm the wrapped store accepts this.
        self.storage.remove((None,None,None),identifier)

    def bind(self, prefix, namespace):
        self.storage.bind(prefix, namespace)

    def prefix(self, namespace):
        return self.storage.prefix(namespace)

    def namespace(self, prefix):
        return self.storage.namespace(prefix)

    def namespaces(self):
        return self.storage.namespaces()

    def commit(self):
        self.storage.commit()

    def rollback(self):
        self.storage.rollback()
spo = self.__spo try: po = spo[subject] except: po = spo[subject] = {} try: o = po[predicate] except: o = po[predicate] = {} o[object] = 1 pos = self.__pos try: os = pos[predicate] except: os = pos[predicate] = {} try: s = os[object] except: s = os[object] = {} s[subject] = 1 osp = self.__osp try: sp = osp[object] except: sp = osp[object] = {} try: p = sp[subject] except: p = sp[subject] = {} p[predicate] = 1 def remove(self, (subject, predicate, object), context=None): for (subject, predicate, object), c in self.triples((subject, predicate, object)): del self.__spo[subject][predicate][object] del self.__pos[predicate][object][subject] del self.__osp[object][subject][predicate] def triples(self, (subject, predicate, object), context=None): """A generator over all the triples matching """ if subject!=ANY: # subject is given spo = self.__spo if subject in spo: subjectDictionary = spo[subject] if predicate!=ANY: # subject+predicate is given if predicate in subjectDictionary: if object!=ANY: # subject+predicate+object is given if object in subjectDictionary[predicate]: yield (subject, predicate, object), self.__contexts() else: # given object not found pass else: # subject+predicate is given, object unbound for o in subjectDictionary[predicate].keys(): yield (subject, predicate, o), self.__contexts() else: # given predicate not found pass else: # subject given, predicate unbound for p in subjectDictionary.keys(): if object!=ANY: # object is given if object in subjectDictionary[p]: yield (subject, p, object), self.__contexts() else: # given object not found pass else: # object unbound for o in subjectDictionary[p].keys(): yield (subject, p, o), self.__contexts() else: # given subject not found pass elif predicate!=ANY: # predicate is given, subject unbound pos = self.__pos if predicate in pos: predicateDictionary = pos[predicate] if object!=ANY: # predicate+object is given, subject unbound if object in predicateDictionary: for s in predicateDictionary[object].keys(): 
yield (s, predicate, object), self.__contexts() else: # given object not found pass else: # predicate is given, object+subject unbound for o in predicateDictionary.keys(): for s in predicateDictionary[o].keys(): yield (s, predicate, o), self.__contexts() elif object!=ANY: # object is given, subject+predicate unbound osp = self.__osp if object in osp: objectDictionary = osp[object] for s in objectDictionary.keys(): for p in objectDictionary[s].keys(): yield (s, p, object), self.__contexts() else: # subject+predicate+object unbound spo = self.__spo for s in spo.keys(): subjectDictionary = spo[s] for p in subjectDictionary.keys(): for o in subjectDictionary[p].keys(): yield (s, p, o), self.__contexts() def __len__(self, context=None): #@@ optimize i = 0 for triple in self.triples((None, None, None)): i += 1 return i def bind(self, prefix, namespace): self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): return self.__namespace.get(prefix, None) def prefix(self, namespace): return self.__prefix.get(namespace, None) def namespaces(self): for prefix, namespace in self.__namespace.iteritems(): yield prefix, namespace def __contexts(self): return (c for c in []) # TODO: best way to return empty generator rdflib-2.4.2/rdflib/store/AbstractSQLStore.py0000644000175000017500000012462211153616032020040 0ustar nachonachofrom __future__ import generators from rdflib import BNode from rdflib import RDF from rdflib.Literal import Literal from rdflib.URIRef import URIRef from rdflib.BNode import BNode from pprint import pprint import sha,sys, weakref from rdflib.term_utils import * from rdflib.Graph import QuotedGraph from rdflib.store.REGEXMatching import REGEXTerm, PYTHON_REGEX from rdflib.store import Store Any = None COUNT_SELECT = 0 CONTEXT_SELECT = 1 TRIPLE_SELECT = 2 TRIPLE_SELECT_NO_ORDER = 3 ASSERTED_NON_TYPE_PARTITION = 3 ASSERTED_TYPE_PARTITION = 4 QUOTED_PARTITION = 5 ASSERTED_LITERAL_PARTITION = 6 FULL_TRIPLE_PARTITIONS = 
# NOTE: the module's import preamble (rdflib, sha, term_utils, ...) precedes
# this section in the original file.

# select-statement flavors
COUNT_SELECT = 0
CONTEXT_SELECT = 1
TRIPLE_SELECT = 2
TRIPLE_SELECT_NO_ORDER = 3

# triple-table partitions
ASSERTED_NON_TYPE_PARTITION = 3
ASSERTED_TYPE_PARTITION = 4
QUOTED_PARTITION = 5
ASSERTED_LITERAL_PARTITION = 6

# partitions whose rows already carry the full 7-column triple layout
FULL_TRIPLE_PARTITIONS = [QUOTED_PARTITION,ASSERTED_LITERAL_PARTITION]

INTERNED_PREFIX = 'kb_'

#Terms: u - uri refs  v - variables  b - bnodes  l - literal  f - formula

def queryAnalysis(query, store, cursor):
    """Run EXPLAIN on a dispatched SQL statement and tally index usage on
    ``store.queryOptMarks`` -- a debugging aid for analyzing index coverage.
    """
    cursor.execute(store._normalizeSQLCmd('explain ' + query))
    row = cursor.fetchall()[0]
    table, joinType, posKeys, _key, key_len, comparedCol, rowsExamined, extra = row
    if not _key:
        # no usable index -> must be a full table scan
        assert joinType == 'ALL'
        if not hasattr(store, 'queryOptMarks'):
            store.queryOptMarks = {}
        marks = store.queryOptMarks
        marks[('FULL SCAN', table)] = marks.get(('FULL SCAN', table), 0) + 1
    # NOTE: deliberately also tallies (_key, table) for full scans, matching
    # the original behavior (the key is then None)
    if not hasattr(store, 'queryOptMarks'):
        store.queryOptMarks = {}
    marks = store.queryOptMarks
    marks[(_key, table)] = marks.get((_key, table), 0) + 1


def unionSELECT(selectComponents, distinct=False, selectType=TRIPLE_SELECT):
    """Build a UNION [ALL] SELECT over the given table partitions.

    selectComponents: iterable of
        (tableName, tableAlias, whereClause, tableType)
    distinct:   use UNION (dedup) instead of UNION ALL
    selectType: one of COUNT_SELECT / CONTEXT_SELECT / TRIPLE_SELECT /
                TRIPLE_SELECT_NO_ORDER
    """
    fragments = []
    for tableName, tableAlias, whereClause, tableType in selectComponents:
        if selectType == COUNT_SELECT:
            head = "select count(*)"
            source = " from %s " % tableName
        elif selectType == CONTEXT_SELECT:
            head = "select %s.context" % tableAlias
            source = " from %s as %s " % (tableName, tableAlias)
        elif tableType in FULL_TRIPLE_PARTITIONS:
            # these partitions already expose the full 7-column layout
            head = "select *"
            source = " from %s as %s " % (tableName, tableAlias)
        elif tableType == ASSERTED_TYPE_PARTITION:
            # synthesize rdf:type triples from the class-membership table
            head = """select %s.member as subject, "%s" as predicate, %s.klass as object, %s.context as context, %s.termComb as termComb, NULL as objLanguage, NULL as objDatatype""" % (
                tableAlias, RDF.type, tableAlias, tableAlias, tableAlias)
            source = " from %s as %s " % (tableName, tableAlias)
        elif tableType == ASSERTED_NON_TYPE_PARTITION:
            head = """select *,NULL as objLanguage, NULL as objDatatype"""
            source = " from %s as %s " % (tableName, tableAlias)
        fragments.append(head + source + whereClause)
    if selectType == TRIPLE_SELECT:
        orderStmt = ' order by subject,predicate,object'
    else:
        orderStmt = ''
    if distinct:
        return ' union '.join(fragments) + orderStmt
    return ' union all '.join(fragments) + orderStmt


def extractTriple(tupleRt, store, hardCodedContext=None):
    """Convert one result-set row into rdflib terms, using the termComb
    integer to decide how each slot is instantiated.

    Returns (s, p, o, (graphKlass, idKlass, context)).
    """
    subject, predicate, obj, rtContext, termComb, objLanguage, objDatatype = tupleRt
    # fall back to the hard-coded context when the row carries none
    # (equivalent to the original ``A is not None and A or B`` idiom)
    context = rtContext or hardCodedContext.identifier
    termCombString = REVERSE_TERM_COMBINATIONS[termComb]
    subjTerm, predTerm, objTerm, ctxTerm = termCombString
    s = createTerm(subject, subjTerm, store)
    p = createTerm(predicate, predTerm, store)
    o = createTerm(obj, objTerm, store, objLanguage, objDatatype)
    graphKlass, idKlass = constructGraph(ctxTerm)
    return s, p, o, (graphKlass, idKlass, context)
#Takes a term value, term type, and store instance and creates a term object.
#QuotedGraphs are instantiated differently.
def createTerm(termString,termType,store,objLanguage=None,objDatatype=None):
    """Instantiate a term from its string value and one-char type code
    ('L' literal, 'F' formula/QuotedGraph, 'B' bnode, 'U' URI, anything else
    via TERM_INSTANCIATION_DICT), memoized on the store's per-type caches."""
    if termType == 'L':
        cache = store.literalCache.get((termString,objLanguage,objDatatype))
        if cache is not None:
            #store.cacheHits += 1
            return cache
        else:
            #store.cacheMisses += 1
            rt = Literal(termString,objLanguage,objDatatype)
            store.literalCache[((termString,objLanguage,objDatatype))] = rt
            return rt
    elif termType=='F':
        cache = store.otherCache.get((termType,termString))
        if cache is not None:
            #store.cacheHits += 1
            return cache
        else:
            #store.cacheMisses += 1
            rt = QuotedGraph(store,URIRef(termString))
            store.otherCache[(termType,termString)] = rt
            return rt
    elif termType == 'B':
        cache = store.bnodeCache.get((termString))
        if cache is not None:
            #store.cacheHits += 1
            return cache
        else:
            #store.cacheMisses += 1
            rt = TERM_INSTANCIATION_DICT[termType](termString)
            store.bnodeCache[(termString)] = rt
            return rt
    elif termType =='U':
        cache = store.uriCache.get((termString))
        if cache is not None:
            #store.cacheHits += 1
            return cache
        else:
            #store.cacheMisses += 1
            rt = URIRef(termString)
            store.uriCache[(termString)] = rt
            return rt
    else:
        cache = store.otherCache.get((termType,termString))
        if cache is not None:
            #store.cacheHits += 1
            return cache
        else:
            #store.cacheMisses += 1
            rt = TERM_INSTANCIATION_DICT[termType](termString)
            store.otherCache[(termType,termString)] = rt
            return rt


class SQLGenerator:
    """Mixin that builds the SQL statements used by AbstractSQLStore."""

    def executeSQL(self,cursor,qStr,params=None,paramList=False):
        """
        This takes the query string and parameters and (depending on the SQL
        implementation) either fills the parameters in-place or passes them
        on to the Python DB impl (if it supports this).  The default (here)
        is to fill the parameters in-place surrounding each param with quote
        characters.
        """
        #print qStr,params
        if not params:
            cursor.execute(unicode(qStr))
        elif paramList:
            raise Exception("Not supported!")
        else:
            params = tuple([not isinstance(item,int) and u'"%s"'%item or item for item in params])
            cursor.execute(qStr%params)

    #FIXME: This *may* prove to be a performance bottleneck and should
    #perhaps be implemented in C (as it was in 4Suite RDF)
    def EscapeQuotes(self,qstr):
        """
        Escape backslashes and single quotes for SQL string literals.
        Ported from Ft.Lib.DbUtil
        """
        if qstr is None:
            return ''
        tmp = qstr.replace("\\","\\\\")
        tmp = tmp.replace("'", "\\'")
        return tmp

    #Normalize a SQL command before executing it.  Commence unicode black magic
    def _normalizeSQLCmd(self,cmd):
        import types
        if not isinstance(cmd, types.UnicodeType):
            cmd = unicode(cmd, 'ascii')
        return cmd.encode('utf-8')

    #Takes a term and 'normalizes' it.
    #Literals are escaped, Graphs are replaced with just their identifiers
    def normalizeTerm(self,term):
        if isinstance(term,(QuotedGraph,Graph)):
            return term.identifier.encode('utf-8')
        elif isinstance(term,Literal):
            return self.EscapeQuotes(term).encode('utf-8')
        elif term is None or isinstance(term,(list,REGEXTerm)):
            return term
        else:
            return term.encode('utf-8')

    #Builds an insert command for a type table
    def buildTypeSQLCommand(self,member,klass,context,storeId):
        #columns: member,klass,context
        rt= "INSERT INTO %s_type_statements"%storeId + " VALUES (%s, %s, %s,%s)"
        return rt,[
            self.normalizeTerm(member),
            self.normalizeTerm(klass),
            self.normalizeTerm(context.identifier),
            int(type2TermCombination(member,klass,context))]

    #Builds an insert command for literal triples (statements where the
    #object is a Literal)
    def buildLiteralTripleSQLCommand(self,subject,predicate,obj,context,storeId):
        triplePattern = int(statement2TermCombination(subject,predicate,obj,context))
        literal_table = "%s_literal_statements"%storeId
        command="INSERT INTO %s "%literal_table +"VALUES (%s, %s, %s, %s, %s,%s,%s)"
        return command,[
            self.normalizeTerm(subject),
            self.normalizeTerm(predicate),
            self.normalizeTerm(obj),
            self.normalizeTerm(context.identifier),
            triplePattern,
            isinstance(obj,Literal) and obj.language or 'NULL',
            isinstance(obj,Literal) and obj.datatype or 'NULL']

    #Builds an insert command for regular triple table
    def buildTripleSQLCommand(self,subject,predicate,obj,context,storeId,quoted):
        stmt_table = quoted and "%s_quoted_statements"%storeId or "%s_asserted_statements"%storeId
        triplePattern = statement2TermCombination(subject,predicate,obj,context)
        if quoted:
            command="INSERT INTO %s"%stmt_table +" VALUES (%s, %s, %s, %s, %s,%s,%s)"
            params = [
                self.normalizeTerm(subject),
                self.normalizeTerm(predicate),
                self.normalizeTerm(obj),
                self.normalizeTerm(context.identifier),
                triplePattern,
                isinstance(obj,Literal) and obj.language or 'NULL',
                isinstance(obj,Literal) and obj.datatype or 'NULL']
        else:
            command="INSERT INTO %s"%stmt_table + " VALUES (%s, %s, %s, %s, %s)"
            params = [
                self.normalizeTerm(subject),
                self.normalizeTerm(predicate),
                self.normalizeTerm(obj),
                self.normalizeTerm(context.identifier),
                triplePattern]
        return command,params

    #Builds WHERE clauses for the supplied terms and, context
    def buildClause(self,tableName,subject,predicate, obj,context=None,typeTable=False):
        parameters=[]
        if typeTable:
            # FIX: the original initialized rdf_type_contextClause twice and
            # never rdf_type_klassClause, raising NameError whenever the
            # class clause was absent
            rdf_type_memberClause = rdf_type_klassClause = rdf_type_contextClause = None
            clauseParts = self.buildTypeMemberClause(self.normalizeTerm(subject),tableName)
            if clauseParts is not None:
                rdf_type_memberClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildTypeClassClause(self.normalizeTerm(obj),tableName)
            if clauseParts is not None:
                rdf_type_klassClause = clauseParts[0]
                parameters.extend(clauseParts[-1])
            clauseParts = self.buildContextClause(context,tableName)
            if clauseParts is not None:
                rdf_type_contextClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            typeClauses = [rdf_type_memberClause,rdf_type_klassClause,rdf_type_contextClause]
            clauseString = ' and '.join([clause for clause in typeClauses if clause])
            clauseString = clauseString and 'where '+clauseString or ''
        else:
            subjClause = predClause = objClause = contextClause = litDTypeClause = litLanguageClause = None
            clauseParts = self.buildSubjClause(self.normalizeTerm(subject),tableName)
            if clauseParts is not None:
                subjClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildPredClause(self.normalizeTerm(predicate),tableName)
            if clauseParts is not None:
                predClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildObjClause(self.normalizeTerm(obj),tableName)
            if clauseParts is not None:
                objClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildContextClause(context,tableName)
            if clauseParts is not None:
                contextClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildLitDTypeClause(obj,tableName)
            if clauseParts is not None:
                litDTypeClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauseParts = self.buildLitLanguageClause(obj,tableName)
            if clauseParts is not None:
                litLanguageClause = clauseParts[0]
                parameters.extend([param for param in clauseParts[-1] if param])
            clauses=[subjClause,predClause,objClause,contextClause,litDTypeClause,litLanguageClause]
            clauseString = ' and '.join([clause for clause in clauses if clause])
            clauseString = clauseString and 'where '+clauseString or ''
        return clauseString, [p for p in parameters if p]

    def buildLitDTypeClause(self,obj,tableName):
        if isinstance(obj,Literal):
            return obj.datatype is not None and ("%s.objDatatype="%(tableName)+"%s",[obj.datatype.encode('utf-8')]) or None
        else:
            return None

    def buildLitLanguageClause(self,obj,tableName):
        if isinstance(obj,Literal):
            return obj.language is not None and ("%s.objLanguage="%(tableName)+"%s",[obj.language.encode('utf-8')]) or None
        else:
            return None

    #Stubs for Clause Functions that are overridden by specific
    #implementations (MySQL vs SQLite for instance)
    def buildSubjClause(self,subject,tableName):
        pass
    def buildPredClause(self,predicate,tableName):
        pass
    def buildObjClause(self,obj,tableName):
        pass
    def buildContextClause(self,context,tableName):
        pass
    def buildTypeMemberClause(self,subject,tableName):
        pass
    def buildTypeClassClause(self,obj,tableName):
        pass
#If the object term is a wildcard (None) #Then the Literal paritition is searched in addition to the others #If this parameter is false, the literal partition is searched regardless of what the object #of the triple pattern is self.STRONGLY_TYPED_TERMS = False if configuration is not None: self.open(configuration) self.cacheHits = 0 self.cacheMisses = 0 self.literalCache = {} self.uriCache = {} self.bnodeCache = {} self.otherCache = {} self._db = None def close(self, commit_pending_transaction=False): """ FIXME: Add documentation!! """ if commit_pending_transaction: self._db.commit() self._db.close() #Triple Methods def add(self, (subject, predicate, obj), context=None, quoted=False): """ Add a triple to the store of triples. """ c=self._db.cursor() if self.autocommit_default: c.execute("""SET AUTOCOMMIT=0""") if quoted or predicate != RDF.type: #quoted statement or non rdf:type predicate #check if object is a literal if isinstance(obj,Literal): addCmd,params=self.buildLiteralTripleSQLCommand(subject,predicate,obj,context,self._internedId) else: addCmd,params=self.buildTripleSQLCommand(subject,predicate,obj,context,self._internedId,quoted) elif predicate == RDF.type: #asserted rdf:type statement addCmd,params=self.buildTypeSQLCommand(subject,obj,context,self._internedId) self.executeSQL(c,addCmd,params) c.close() def addN(self,quads): c=self._db.cursor() if self.autocommit_default: c.execute("""SET AUTOCOMMIT=0""") literalTriples = [] typeTriples = [] otherTriples = [] literalTripleInsertCmd = None typeTripleInsertCmd = None otherTripleInsertCmd = None for subject,predicate,obj,context in quads: if isinstance(context,QuotedGraph) or predicate != RDF.type: #quoted statement or non rdf:type predicate #check if object is a literal if isinstance(obj,Literal): cmd,params=self.buildLiteralTripleSQLCommand(subject,predicate,obj,context,self._internedId) literalTripleInsertCmd = literalTripleInsertCmd is not None and literalTripleInsertCmd or cmd 
literalTriples.append(params) else: cmd,params=self.buildTripleSQLCommand(subject,predicate,obj,context,self._internedId,isinstance(context,QuotedGraph)) otherTripleInsertCmd = otherTripleInsertCmd is not None and otherTripleInsertCmd or cmd otherTriples.append(params) elif predicate == RDF.type: #asserted rdf:type statement cmd,params=self.buildTypeSQLCommand(subject,obj,context,self._internedId) typeTripleInsertCmd = typeTripleInsertCmd is not None and typeTripleInsertCmd or cmd typeTriples.append(params) if literalTriples: self.executeSQL(c,literalTripleInsertCmd,literalTriples,paramList=True) if typeTriples: self.executeSQL(c,typeTripleInsertCmd,typeTriples,paramList=True) if otherTriples: self.executeSQL(c,otherTripleInsertCmd,otherTriples,paramList=True) c.close() def remove(self, (subject, predicate, obj), context): """ Remove a triple from the store """ if context is not None: if subject is None and predicate is None and object is None: self._remove_context(context) return c=self._db.cursor() if self.autocommit_default: c.execute("""SET AUTOCOMMIT=0""") quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId if not predicate or predicate != RDF.type: #Need to remove predicates other than rdf:type if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal): #remove literal triple clauseString,params = self.buildClause(literal_table,subject,predicate, obj,context) if clauseString: cmd ="DELETE FROM " + " ".join([literal_table,clauseString]) else: cmd ="DELETE FROM " + literal_table self.executeSQL(c,self._normalizeSQLCmd(cmd),params) for table in [quoted_table,asserted_table]: #If asserted non rdf:type table and obj is Literal, don't do anything (already taken care of) if table == asserted_table and isinstance(obj,Literal): continue else: clauseString,params = 
self.buildClause(table,subject,predicate,obj,context) if clauseString: cmd="DELETE FROM " + " ".join([table,clauseString]) else: cmd = "DELETE FROM " + table self.executeSQL(c,self._normalizeSQLCmd(cmd),params) if predicate == RDF.type or not predicate: #Need to check rdf:type and quoted partitions (in addition perhaps) clauseString,params = self.buildClause(asserted_type_table,subject,RDF.type,obj,context,True) if clauseString: cmd="DELETE FROM " + " ".join([asserted_type_table,clauseString]) else: cmd='DELETE FROM '+asserted_type_table self.executeSQL(c,self._normalizeSQLCmd(cmd),params) clauseString,params = self.buildClause(quoted_table,subject,predicate, obj,context) if clauseString: cmd=clauseString and "DELETE FROM " + " ".join([quoted_table,clauseString]) else: cmd = "DELETE FROM " + quoted_table self.executeSQL(c,self._normalizeSQLCmd(cmd),params) c.close() def triples(self, (subject, predicate, obj), context=None): """ A generator over all the triples matching pattern. Pattern can be any objects for comparing against nodes in the store, for example, RegExLiteral, Date? DateRange? 
quoted table: _quoted_statements asserted rdf:type table: _type_statements asserted non rdf:type table: _asserted_statements triple columns: subject,predicate,object,context,termComb,objLanguage,objDatatype class membership columns: member,klass,context termComb FIXME: These union all selects *may* be further optimized by joins """ quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId c=self._db.cursor() parameters = [] if predicate == RDF.type: #select from asserted rdf:type partition and quoted table (if a context is specified) clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,context,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate: #Select from quoted partition (if context is specified), literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,context,True) parameters.extend(params) selects.append( ( asserted_type_table, 'typeTable', clauseString, 
ASSERTED_TYPE_PARTITION ) ) elif predicate: #select from asserted non rdf:type partition (optionally), quoted partition (if context is speciied), and literal partition (optionally) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) if context is not None: clauseString,params = self.buildClause('quoted',subject,predicate, obj,context) parameters.extend(params) selects.append( ( quoted_table, 'quoted', clauseString, QUOTED_PARTITION ) ) q=self._normalizeSQLCmd(unionSELECT(selects)) self.executeSQL(c,q,parameters) rt = c.fetchone() while rt: s,p,o,(graphKlass,idKlass,graphId) = extractTriple(rt,self,context) currentContext=graphKlass(self,idKlass(graphId)) contexts = [currentContext] rt = next = c.fetchone() sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) while sameTriple: s2,p2,o2,(graphKlass,idKlass,graphId) = extractTriple(next,self,context) c2 = graphKlass(self,idKlass(graphId)) contexts.append(c2) rt = next = c.fetchone() sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) yield (s,p,o),(c for c in contexts) def triples_choices(self, (subject, predicate, object_),context=None): """ A variant of triples that can take a list of terms instead of a single term in any slot. 
Stores can implement this to optimize the response time from the import default 'fallback' implementation, which will iterate over each term in the list and dispatch to tripless """ if isinstance(object_,list): assert not isinstance(subject,list), "object_ / subject are both lists" assert not isinstance(predicate,list), "object_ / predicate are both lists" if not object_: object_ = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg elif isinstance(subject,list): assert not isinstance(predicate,list), "subject / predicate are both lists" if not subject: subject = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg elif isinstance(predicate,list): assert not isinstance(subject,list), "predicate / subject are both lists" if not predicate: predicate = None for (s1, p1, o1), cg in self.triples((subject,predicate,object_),context): yield (s1, p1, o1), cg def __repr__(self): c=self._db.cursor() quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId selects = [ ( asserted_type_table, 'typeTable', '', ASSERTED_TYPE_PARTITION ), ( quoted_table, 'quoted', '', QUOTED_PARTITION ), ( asserted_table, 'asserted', '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=False,selectType=COUNT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q)) rt=c.fetchall() typeLen,quotedLen,assertedLen,literalLen = [rtTuple[0] for rtTuple in rt] return ""%(len([c for c in self.contexts()]),typeLen,quotedLen,literalLen,assertedLen) def __len__(self, context=None): """ Number of statements in the store. 
""" c=self._db.cursor() quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId parameters = [] quotedContext = assertedContext = typeContext = literalContext = None clauseParts = self.buildContextClause(context,quoted_table) if clauseParts: quotedContext,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,asserted_table) if clauseParts: assertedContext,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,asserted_type_table) if clauseParts: typeContext ,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,literal_table) if clauseParts: literalContext,params = clauseParts parameters.extend([p for p in params if p]) if context is not None: selects = [ ( asserted_type_table, 'typeTable', typeContext and 'where ' + typeContext or '', ASSERTED_TYPE_PARTITION ), ( quoted_table, 'quoted', quotedContext and 'where ' + quotedContext or '', QUOTED_PARTITION ), ( asserted_table, 'asserted', assertedContext and 'where ' + assertedContext or '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', literalContext and 'where ' + literalContext or '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=True,selectType=COUNT_SELECT) else: selects = [ ( asserted_type_table, 'typeTable', typeContext and 'where ' + typeContext or '', ASSERTED_TYPE_PARTITION ), ( asserted_table, 'asserted', assertedContext and 'where ' + assertedContext or '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', literalContext and 'where ' + literalContext or '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=False,selectType=COUNT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q),parameters) rt=c.fetchall() c.close() return 
reduce(lambda x,y: x+y, [rtTuple[0] for rtTuple in rt]) def contexts(self, triple=None): c=self._db.cursor() quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId parameters = [] if triple is not None: subject,predicate,obj=triple if predicate == RDF.type: #select from asserted rdf:type partition and quoted table (if a context is specified) clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,Any,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate: #Select from quoted partition (if context is specified), literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None) clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,Any,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) elif predicate: #select from asserted non rdf:type partition (optionally), quoted partition (if context is speciied), and literal partition (optionally) selects = [] if not self.STRONGLY_TYPED_TERMS or 
isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) clauseString,params = self.buildClause('quoted',subject,predicate, obj) parameters.extend(params) selects.append( ( quoted_table, 'quoted', clauseString, QUOTED_PARTITION ) ) q=unionSELECT(selects,distinct=True,selectType=CONTEXT_SELECT) else: selects = [ ( asserted_type_table, 'typeTable', '', ASSERTED_TYPE_PARTITION ), ( quoted_table, 'quoted', '', QUOTED_PARTITION ), ( asserted_table, 'asserted', '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=True,selectType=CONTEXT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q),parameters) rt=c.fetchall() for context in [rtTuple[0] for rtTuple in rt]: yield context c.close() def _remove_context(self, identifier): """ """ assert identifier c=self._db.cursor() if self.autocommit_default: c.execute("""SET AUTOCOMMIT=0""") quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId for table in [quoted_table,asserted_table,asserted_type_table,literal_table]: clauseString,params = self.buildContextClause(identifier,table) self.executeSQL( c, self._normalizeSQLCmd("DELETE from %s "%table + "where %s"%clauseString), [p for p in params if p] ) c.close() # Optional Namespace methods #optimized interfaces (those needed in 
order to port Versa) def subjects(self, predicate=None, obj=None): """ A generator of subjects with the given predicate and object. """ raise Exception("Not implemented") #capable of taking a list of predicate terms instead of a single term def objects(self, subject=None, predicate=None): """ A generator of objects with the given subject and predicate. """ raise Exception("Not implemented") #optimized interfaces (others) def predicate_objects(self, subject=None): """ A generator of (predicate, object) tuples for the given subject """ raise Exception("Not implemented") def subject_objects(self, predicate=None): """ A generator of (subject, object) tuples for the given predicate """ raise Exception("Not implemented") def subject_predicates(self, object=None): """ A generator of (subject, predicate) tuples for the given object """ raise Exception("Not implemented") def value(self, subject, predicate=u'http://www.w3.org/1999/02/22-rdf-syntax-ns#value', object=None, default=None, any=False): """ Get a value for a subject/predicate, predicate/object, or subject/object pair -- exactly one of subject, predicate, object must be None. Useful if one knows that there may only be one value. 
It is one of those situations that occur a lot, hence this 'macro' like utility Parameters: ----------- subject, predicate, object -- exactly one must be None default -- value to be returned if no values found any -- if True: return any value in the case there is more than one else: raise UniquenessError""" raise Exception("Not implemented") #Namespace persistence interface implementation def bind(self, prefix, namespace): """ """ c=self._db.cursor() try: c.execute("INSERT INTO %s_namespace_binds VALUES ('%s', '%s')"%( self._internedId, prefix, namespace) ) except: pass c.close() def prefix(self, namespace): """ """ c=self._db.cursor() c.execute("select prefix from %s_namespace_binds where uri = '%s'"%( self._internedId, namespace) ) rt = [rtTuple[0] for rtTuple in c.fetchall()] c.close() return rt and rt[0] or None def namespace(self, prefix): """ """ c=self._db.cursor() try: c.execute("select uri from %s_namespace_binds where prefix = '%s'"%( self._internedId, prefix) ) except: return None rt = [rtTuple[0] for rtTuple in c.fetchall()] c.close() return rt and rt[0] or None def namespaces(self): """ """ c=self._db.cursor() c.execute("select prefix, uri from %s_namespace_binds where 1;"%( self._internedId ) ) rt=c.fetchall() c.close() for prefix,uri in rt: yield prefix,uri #Transactional interfaces def commit(self): """ """ self._db.commit() def rollback(self): """ """ self._db.rollback() table_name_prefixes = [ '%s_asserted_statements', '%s_type_statements', '%s_quoted_statements', '%s_namespace_binds', '%s_literal_statements' ] rdflib-2.4.2/rdflib/store/Sleepycat.py0000644000175000017500000004315711153616032016634 0ustar nachonachofrom rdflib.store import Store, VALID_STORE, CORRUPTED_STORE, NO_STORE, UNKNOWN from rdflib.URIRef import URIRef from bsddb import db from os import mkdir from os.path import exists, abspath from urllib import pathname2url from threading import Thread import logging _logger = logging.getLogger(__name__) class Sleepycat(Store): 
context_aware = True formula_aware = True transaction_aware = False def __init__(self, configuration=None, identifier=None): self.__open = False self.__identifier = identifier super(Sleepycat, self).__init__(configuration) self.configuration = configuration self._loads = self.node_pickler.loads self._dumps = self.node_pickler.dumps self.db_env = None def __get_identifier(self): return self.__identifier identifier = property(__get_identifier) def _init_db_environment(self, homeDir, create=True): envsetflags = db.DB_CDB_ALLDB envflags = db.DB_INIT_MPOOL | db.DB_INIT_CDB | db.DB_THREAD if not exists(homeDir): if create==True: mkdir(homeDir) # TODO: implement create method and refactor this to it self.create(homeDir) else: return -1 db_env = db.DBEnv() db_env.set_cachesize(0, 1024*1024*50) # TODO #db_env.set_lg_max(1024*1024) db_env.set_flags(envsetflags, 1) db_env.open(homeDir, envflags | db.DB_CREATE) return db_env def is_open(self): return self.__open def open(self, path, create=True): homeDir = path if self.__identifier is None: self.__identifier = URIRef(pathname2url(abspath(homeDir))) self.db_env = db_env = self._init_db_environment(homeDir, create) self.__open = True dbname = None dbtype = db.DB_BTREE # auto-commit ensures that the open-call commits when transactions are enabled dbopenflags = db.DB_THREAD if self.transaction_aware == True: dbopenflags |= db.DB_AUTO_COMMIT dbmode = 0660 dbsetflags = 0 # create and open the DBs self.__indicies = [None,] * 3 self.__indicies_info = [None,] * 3 for i in xrange(0, 3): index_name = to_key_func(i)(("s", "p", "o"), "c") index = db.DB(db_env) index.set_flags(dbsetflags) index.open(index_name, dbname, dbtype, dbopenflags|db.DB_CREATE, dbmode) self.__indicies[i] = index self.__indicies_info[i] = (index, to_key_func(i), from_key_func(i)) lookup = {} for i in xrange(0, 8): results = [] for start in xrange(0, 3): score = 1 len = 0 for j in xrange(start, start+3): if i & (1<<(j%3)): score = score << 1 len += 1 else: break 
tie_break = 2-start results.append(((score, tie_break), start, len)) results.sort() score, start, len = results[-1] def get_prefix_func(start, end): def get_prefix(triple, context): if context is None: yield "" else: yield context i = start while i min_seconds or time()-t0 > max_seconds: self.__needs_sync = False _logger.debug("sync") self.sync() break else: sleep(1) except Exception, e: _logger.exception(e) def sync(self): if self.__open: for i in self.__indicies: i.sync() self.__contexts.sync() self.__namespace.sync() self.__prefix.sync() self.__i2k.sync() self.__k2i.sync() def close(self, commit_pending_transaction=False): self.__open = False self.__sync_thread.join() for i in self.__indicies: i.close() self.__contexts.close() self.__namespace.close() self.__prefix.close() self.__i2k.close() self.__k2i.close() self.db_env.close() def add(self, (subject, predicate, object), context, quoted=False, txn=None): """\ Add a triple to the store of triples. """ assert self.__open, "The Store must be open." 
assert context!=self, "Can not add triple directly to store" Store.add(self, (subject, predicate, object), context, quoted) _to_string = self._to_string s = _to_string(subject, txn=txn) p = _to_string(predicate, txn=txn) o = _to_string(object, txn=txn) c = _to_string(context, txn=txn) cspo, cpos, cosp = self.__indicies value = cspo.get("%s^%s^%s^%s^" % (c, s, p, o), txn=txn) if value is None: self.__contexts.put(c, "", txn=txn) contexts_value = cspo.get("%s^%s^%s^%s^" % ("", s, p, o), txn=txn) or "" contexts = set(contexts_value.split("^")) contexts.add(c) contexts_value = "^".join(contexts) assert contexts_value!=None cspo.put("%s^%s^%s^%s^" % (c, s, p, o), "", txn=txn) cpos.put("%s^%s^%s^%s^" % (c, p, o, s), "", txn=txn) cosp.put("%s^%s^%s^%s^" % (c, o, s, p), "", txn=txn) if not quoted: cspo.put("%s^%s^%s^%s^" % ("", s, p, o), contexts_value, txn=txn) cpos.put("%s^%s^%s^%s^" % ("", p, o, s), contexts_value, txn=txn) cosp.put("%s^%s^%s^%s^" % ("", o, s, p), contexts_value, txn=txn) self.__needs_sync = True def __remove(self, (s, p, o), c, quoted=False, txn=None): cspo, cpos, cosp = self.__indicies contexts_value = cspo.get("^".join(("", s, p, o, "")), txn=txn) or "" contexts = set(contexts_value.split("^")) contexts.discard(c) contexts_value = "^".join(contexts) for i, _to_key, _from_key in self.__indicies_info: i.delete(_to_key((s, p, o), c), txn=txn) if not quoted: if contexts_value: for i, _to_key, _from_key in self.__indicies_info: i.put(_to_key((s, p, o), ""), contexts_value, txn=txn) else: for i, _to_key, _from_key in self.__indicies_info: try: i.delete(_to_key((s, p, o), ""), txn=txn) except db.DBNotFoundError, e: pass # TODO: is it okay to ignore these? def remove(self, (subject, predicate, object), context, txn=None): assert self.__open, "The Store must be open." 
Store.remove(self, (subject, predicate, object), context) _to_string = self._to_string if context is not None: if context == self: context = None if subject is not None and predicate is not None and object is not None and context is not None: s = _to_string(subject, txn=txn) p = _to_string(predicate, txn=txn) o = _to_string(object, txn=txn) c = _to_string(context, txn=txn) value = self.__indicies[0].get("%s^%s^%s^%s^" % (c, s, p, o), txn=txn) if value is not None: self.__remove((s, p, o), c, txn=txn) self.__needs_sync = True else: cspo, cpos, cosp = self.__indicies index, prefix, from_key, results_from_key = self.__lookup((subject, predicate, object), context, txn=txn) cursor = index.cursor(txn=txn) try: current = cursor.set_range(prefix) needs_sync = True except db.DBNotFoundError: current = None needs_sync = False cursor.close() while current: key, value = current cursor = index.cursor(txn=txn) try: cursor.set_range(key) current = cursor.next() except db.DBNotFoundError: current = None cursor.close() if key.startswith(prefix): c, s, p, o = from_key(key) if context is None: contexts_value = index.get(key, txn=txn) or "" contexts = set(contexts_value.split("^")) # remove triple from all non quoted contexts contexts.add("") # and from the conjunctive index for c in contexts: for i, _to_key, _ in self.__indicies_info: i.delete(_to_key((s, p, o), c), txn=txn) else: self.__remove((s, p, o), c, txn=txn) else: break if context is not None: if subject is None and predicate is None and object is None: # TODO: also if context becomes empty and not just on remove((None, None, None), c) try: self.__contexts.delete(_to_string(context, txn=txn), txn=txn) except db.DBNotFoundError, e: pass self.__needs_sync = needs_sync def triples(self, (subject, predicate, object), context=None, txn=None): """A generator over all the triples matching """ assert self.__open, "The Store must be open." 
if context is not None: if context == self: context = None _from_string = self._from_string index, prefix, from_key, results_from_key = self.__lookup((subject, predicate, object), context, txn=txn) cursor = index.cursor(txn=txn) try: current = cursor.set_range(prefix) except db.DBNotFoundError: current = None cursor.close() while current: key, value = current cursor = index.cursor(txn=txn) try: cursor.set_range(key) current = cursor.next() except db.DBNotFoundError: current = None cursor.close() if key and key.startswith(prefix): contexts_value = index.get(key, txn=txn) yield results_from_key(key, subject, predicate, object, contexts_value) else: break def __len__(self, context=None): assert self.__open, "The Store must be open." if context is not None: if context == self: context = None if context is None: prefix = "^" else: prefix = "%s^" % self._to_string(context) index = self.__indicies[0] cursor = index.cursor() current = cursor.set_range(prefix) count = 0 while current: key, value = current if key.startswith(prefix): count +=1 current = cursor.next() else: break cursor.close() return count def bind(self, prefix, namespace): prefix = prefix.encode("utf-8") namespace = namespace.encode("utf-8") bound_prefix = self.__prefix.get(namespace) if bound_prefix: self.__namespace.delete(bound_prefix) self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): prefix = prefix.encode("utf-8") return self.__namespace.get(prefix, None) def prefix(self, namespace): namespace = namespace.encode("utf-8") return self.__prefix.get(namespace, None) def namespaces(self): cursor = self.__namespace.cursor() results = [] current = cursor.first() while current: prefix, namespace = current results.append((prefix, namespace)) current = cursor.next() cursor.close() for prefix, namespace in results: yield prefix, URIRef(namespace) def contexts(self, triple=None): _from_string = self._from_string _to_string = self._to_string if triple: s, p, o = 
triple s = _to_string(s) p = _to_string(p) o = _to_string(o) contexts = self.__indicies[0].get("%s^%s^%s^%s^" % ("", s, p, o)) if contexts: for c in contexts.split("^"): if c: yield _from_string(c) else: index = self.__contexts cursor = index.cursor() current = cursor.first() cursor.close() while current: key, value = current context = _from_string(key) yield context cursor = index.cursor() try: cursor.set_range(key) current = cursor.next() except db.DBNotFoundError: current = None cursor.close() def _from_string(self, i): k = self.__i2k.get(int(i)) return self._loads(k) def _to_string(self, term, txn=None): k = self._dumps(term) i = self.__k2i.get(k, txn=txn) if i is None: # weird behavoir from bsddb not taking a txn as a keyword argument # for append if self.transaction_aware: i = "%s" % self.__i2k.append(k, txn) else: i = "%s" % self.__i2k.append(k) self.__k2i.put(k, i, txn=txn) return i def __lookup(self, (subject, predicate, object), context, txn=None): _to_string = self._to_string if context is not None: context = _to_string(context, txn=txn) i = 0 if subject is not None: i += 1 subject = _to_string(subject, txn=txn) if predicate is not None: i += 2 predicate = _to_string(predicate, txn=txn) if object is not None: i += 4 object = _to_string(object, txn=txn) index, prefix_func, from_key, results_from_key = self.__lookup_dict[i] prefix = "^".join(prefix_func((subject, predicate, object), context)) return index, prefix, from_key, results_from_key def to_key_func(i): def to_key(triple, context): "Takes a string; returns key" return "^".join((context, triple[i%3], triple[(i+1)%3], triple[(i+2)%3], "")) # "" to tac on the trailing ^ return to_key def from_key_func(i): def from_key(key): "Takes a key; returns string" parts = key.split("^") return parts[0], parts[(3-i+0)%3+1], parts[(3-i+1)%3+1], parts[(3-i+2)%3+1] return from_key def results_from_key_func(i, from_string): def from_key(key, subject, predicate, object, contexts_value): "Takes a key and subject, 
predicate, object; returns tuple for yield" parts = key.split("^") if subject is None: # TODO: i & 1: # dis assemble and/or measure to see which is faster # subject is None or i & 1 s = from_string(parts[(3-i+0)%3+1]) else: s = subject if predicate is None:#i & 2: p = from_string(parts[(3-i+1)%3+1]) else: p = predicate if object is None:#i & 4: o = from_string(parts[(3-i+2)%3+1]) else: o = object return (s, p, o), (from_string(c) for c in contexts_value.split("^") if c) return from_key def readable_index(i): s, p, o = "?" * 3 if i & 1: s = "s" if i & 2: p = "p" if i & 4: o = "o" return "%s,%s,%s" % (s, p, o) rdflib-2.4.2/rdflib/store/_sqlobject.py0000644000175000017500000002726411153616032017031 0ustar nachonachofrom __future__ import generators __metaclass__ = type import logging _logger = logging.getLogger("rdflib.store._sqlobject") import re _literal = re.compile(r'''"(?P[^@&]*)"(?:@(?P[^&]*))?(?:&<(?P.*)>)?''') from urllib import quote, unquote from rdflib.store import Store from rdflib.Literal import Literal from rdflib.URIRef import URIRef from rdflib.BNode import BNode from rdflib.exceptions import ContextTypeError from rdflib.compat import rsplit import sqlobject from sqlobject import * LITERAL = 0 URI = 1 NO_URI = 'uri://oops/' Any = None class BaseObject(sqlobject.SQLObject): _lazyUpdate = True _cacheValues = False class Literals(BaseObject): hash = IntCol(notNull=1) value = StringCol(notNull=1, validator=validators.String(strip_spaces=1)) hashIndex = DatabaseIndex('hash') class Namespaces(BaseObject): hash = IntCol(notNull=1) value = StringCol(length=255, notNull=1, validator=validators.String(strip_spaces=1)) hashIndex = DatabaseIndex('hash') class PrefixNamespace(BaseObject): prefix = StringCol(length=255, notNull=1, validator=validators.String(strip_spaces=1)) ns = StringCol(length=255, notNull=1, validator=validators.String(strip_spaces=1)) prefixIndex = DatabaseIndex('prefix') nsIndex = DatabaseIndex('ns') prefixNsIndex = DatabaseIndex('ns', 
'prefix') class Resources(BaseObject): hash = IntCol(notNull=1) ns = IntCol(notNull=1) name = StringCol(length=255, notNull=1, validator=validators.String(strip_spaces=1)) hashIndex = DatabaseIndex('hash') nsIndex = DatabaseIndex('ns') nameIndex = DatabaseIndex('name') nsNameIndex = DatabaseIndex('ns', 'name') hashNsNameIndex = DatabaseIndex('hash', 'ns', 'name') class Triples(BaseObject): subject = IntCol(notNull=1) predicate = IntCol(notNull=1) object = IntCol(notNull=1) objtype = IntCol(notNull=1, default=LITERAL) subjectIndex = DatabaseIndex('subject') predicateIndex = DatabaseIndex('predicate') objectIndex = DatabaseIndex('object', 'objtype') subjectPredicateIndex = DatabaseIndex('subject', 'predicate') subjectObjectIndex = DatabaseIndex('subject', 'object', 'objtype') predicateObjectIndex = DatabaseIndex('predicate', 'object', 'objtype') def splituri(uri): if uri.startswith('<') and uri.endswith('>'): uri = uri[1:-1] if uri.startswith('_'): uid = ''.join(uri.split('_')) return '_', uid if '#' in uri: ns, local = rsplit(uri, '#', 1) return ns + '#', local if '/' in uri: ns, local = rsplit(uri, '/', 1) return ns + '/', local return NO_URI, uri def _fromkey(key): if key.startswith("<") and key.endswith(">"): key = key[1:-1].decode("UTF-8") if key.startswith("_"): key = ''.join(splituri(key)) return BNode(key) return URIRef(key) elif key.startswith("_"): return BNode(key) else: m = _literal.match(key) if m: d = m.groupdict() value = d["value"] value = unquote(value) value = value.decode("UTF-8") lang = d["lang"] or '' datatype = d["datatype"] return Literal(value, lang, datatype) else: msg = "Unknown Key Syntax: '%s'" % key raise Exception(msg) def _tokey(term): if isinstance(term, URIRef): term = term.encode("UTF-8") if not '#' in term and not '/' in term: term = '%s%s' % (NO_URI, term) return '<%s>' % term elif isinstance(term, BNode): return '<%s>' % ''.join(splituri(term.encode("UTF-8"))) elif isinstance(term, Literal): language = term.language datatype = 
term.datatype value = quote(term.encode("UTF-8")) if language: language = language.encode("UTF-8") if datatype: datatype = datatype.encode("UTF-8") n3 = '"%s"@%s&<%s>' % (value, language, datatype) else: n3 = '"%s"@%s' % (value, language) else: if datatype: datatype = datatype.encode("UTF-8") n3 = '"%s"&<%s>' % (value, datatype) else: n3 = '"%s"' % value return n3 else: msg = "Unknown term Type for: %s" % term raise Exception(msg) class SQLObject(Store): context_aware = False __open = False _triples = Triples _literals = Literals _ns = Namespaces _prefix_ns = PrefixNamespace _resources = Resources tables = ('_triples', '_literals', '_ns', '_prefix_ns', '_resources') def __init__(self): pass def open(self, uri, create=True): if self.__open: return self.__open = True self.connection = connection = connectionForURI(uri) # useful for debugging # self.connection.debug = True for att in self.tables: table = getattr(self, att) table._connection = connection try: table.createTable(ifNotExists=create) except Exception, e: # TODO: should catch more specific exception _logger.warning(e) return 0 self.transaction = transaction = connection.transaction() for att in self.tables: table = getattr(self, att) table._connection = transaction return 1 def close(self): if not self.__open: raise ValueError, 'Not open' self.__open = False self.transaction.commit() def _makeHash(self, value): # XXX We will be using python's hash, but it should be a database # hash eventually. 
return hash(value) def _insertLiteral(self, value): v_hash = self._makeHash(value) lit = self._literals if not lit.select(lit.q.hash == v_hash).count(): lit(hash=v_hash, value=value) return v_hash def _makeURIHash(self, value=None, namespace=None, local_name=None): if namespace is None and local_name is None: namespace, local_name = splituri(value) ns_hash = self._makeHash(namespace) rsrc_hash = self._makeHash((ns_hash, local_name)) return ns_hash, rsrc_hash def _insertURI(self, value=None, namespace=None, local_name=None): if namespace is None and local_name is None: namespace, local_name = splituri(value) ns_hash, rsrc_hash = self._makeURIHash(value, namespace, local_name) ns = self._ns if not ns.select(ns.q.hash == ns_hash).count(): ns(hash=ns_hash, value=namespace) rsrc = self._resources if not rsrc.select(rsrc.q.hash == rsrc_hash).count(): rsrc(hash=rsrc_hash, ns=ns_hash, name=local_name) return rsrc_hash def _insertTriple(self, s_hash, p_hash, o_hash, objtype=URI): trip = self._triples clause = AND(trip.q.subject == s_hash, trip.q.predicate == p_hash, trip.q.object == o_hash) if not trip.select(clause).count(): trip(subject=s_hash, predicate=p_hash, object=o_hash, objtype=objtype) def tokey(self, obj): if isinstance(obj, (URIRef, BNode)): return URI, self._makeURIHash(_tokey(obj))[1] elif isinstance(obj, Literal): return LITERAL, self._makeHash(_tokey(obj)) elif obj is Any: return None, Any raise ValueError, obj def insert(self, obj): if isinstance(obj, (URIRef, BNode)): return URI, self._insertURI(_tokey(obj)) elif isinstance(obj, Literal): return LITERAL, self._insertLiteral(_tokey(obj)) raise ValueError, obj def add(self, (subject, predicate, object), context=None): """\ Add a triple to the store of triples. 
""" tokey = self.insert ts, s = tokey(subject) tp, p = tokey(predicate) to, o = tokey(object) self._insertTriple(s, p, o, to) def remove(self, (subject, predicate, object), context=None): tokey = self.tokey where_clause = '' if subject is not Any: ts, s = tokey(subject) where_clause += 'subject = %s' % s if predicate is not Any: if where_clause: where_clause += ' AND ' tp, p = tokey(predicate) where_clause += 'predicate = %s' % p if object is not Any: if where_clause: where_clause += ' AND ' to, o = tokey(object) where_clause += 'object = %s AND objtype = %s' % (o, to) trip = self._triples conn = trip._connection query = 'DELETE from %s' % conn.sqlrepr(trip.q) if where_clause: query += ' WHERE %s' % where_clause conn.query(query) def triples(self, (subject, predicate, object), context=None): conn = self._triples._connection tokey = self.tokey where_clause = '' if subject is not Any: ts, s = tokey(subject) where_clause += 'r1.hash = %s' % s if predicate is not Any: if where_clause: where_clause += ' AND ' tp, p = tokey(predicate) where_clause += 'r2.hash = %s' % p if object is not Any: if where_clause: where_clause += ' AND ' to, o = tokey(object) if to == URI: where_clause += 'r3.hash = %s' % o else: where_clause += 'l.hash = %s' % o query = ("SELECT '<'||n1.value||r1.name||'>' AS subj, " "'<'||n2.value||r2.name||'>' AS pred, " "CASE WHEN t.objtype = %d " "THEN '<'||n3.value||r3.name||'>' " "ELSE l.value END AS obj, " "l.hash, r3.hash " "FROM resources r1, resources r2, " "namespaces n1, namespaces n2, triples t " "LEFT JOIN literals l ON t.object = l.hash " "LEFT JOIN resources r3 ON t.object = r3.hash " "LEFT JOIN namespaces n3 ON r3.ns = n3.hash " "WHERE t.subject = r1.hash AND " "r1.ns = n1.hash AND " "t.predicate = r2.hash AND " "r2.ns = n2.hash" % URI) if where_clause: query += ' AND %s' % where_clause query += ' ORDER BY subj, pred' for t in conn.queryAll(query): triple = _fromkey(t[0]), _fromkey(t[1]), _fromkey(t[2]) yield triple def namespace(self, 
prefix): prefix = prefix.encode("utf-8") pns = self._prefix_ns res = pns.select(pns.q.prefix == prefix) if not res.count(): return None return iter(res).next().ns def prefix(self, namespace): namespace = namespace.encode("utf-8") pns = self._prefix_ns res = pns.select(pns.q.ns == namespace) if not res.count(): return None return iter(res).next().prefix def bind(self, prefix, namespace): if namespace[-1] == "-": raise Exception("??") pns = self._prefix_ns prefix = prefix.encode("utf-8") namespace = namespace.encode("utf-8") res = pns.select(AND(pns.q.ns == namespace, pns.q.prefix == prefix)) if not res.count(): pns(prefix=prefix, ns=namespace) def namespaces(self): pns = self._prefix_ns for p in pns.select(): yield p.prefix, URIRef(p.ns) def __len__(self): return self._triples.select().count() rdflib-2.4.2/rdflib/store/FOPLRelationalModel/0000755000175000017500000000000011204354476020063 5ustar nachonachordflib-2.4.2/rdflib/store/FOPLRelationalModel/QuadSlot.py0000644000175000017500000000560011153616032022162 0ustar nachonacho""" Utility functions associated with RDF terms: - normalizing (to 64 bit integers via half-md5-hashes) - escaping literal's for SQL persistence """ from rdflib.BNode import BNode from rdflib import RDF from rdflib.Literal import Literal from rdflib.URIRef import URIRef import md5 from rdflib.term_utils import * from rdflib.Graph import QuotedGraph from rdflib.store.REGEXMatching import REGEXTerm Any = None SUBJECT = 0 PREDICATE = 1 OBJECT = 2 CONTEXT = 3 DATATYPE_INDEX = CONTEXT + 1 LANGUAGE_INDEX = CONTEXT + 2 SlotPrefixes = { SUBJECT : 'subject', PREDICATE : 'predicate', OBJECT : 'object', CONTEXT : 'context', DATATYPE_INDEX : 'dataType', LANGUAGE_INDEX : 'language' } POSITION_LIST = [SUBJECT,PREDICATE,OBJECT,CONTEXT] def EscapeQuotes(qstr): """ Ported from Ft.Lib.DbUtil """ if qstr is None: return '' tmp = qstr.replace("\\","\\\\") tmp = tmp.replace("'", "\\'") return tmp def dereferenceQuad(index,quad): assert index <= LANGUAGE_INDEX, 
"Invalid Quad Index" if index == DATATYPE_INDEX: return isinstance(quad[OBJECT],Literal) and quad[OBJECT].datatype or None elif index == LANGUAGE_INDEX: return isinstance(quad[OBJECT],Literal) and quad[OBJECT].language or None else: return quad[index] def genQuadSlots(quads): return [QuadSlot(index,quads[index])for index in POSITION_LIST] def normalizeValue(value,termType): if value is None: value = u'http://www.w3.org/2002/07/owl#NothingU' else: value = (isinstance(value,Graph) and value.identifier or str(value)) + termType return int(md5.new(isinstance(value,unicode) and value.encode('utf-8') or value).hexdigest()[:16],16) def normalizeNode(node): return normalizeValue(node, term2Letter(node)) class QuadSlot: def __repr__(self): #NOTE: http://docs.python.org/ref/customization.html return "QuadSlot(%s,%s,%s)"%(SlotPrefixes[self.position],self.term,self.md5Int) def __init__(self,position,term): assert position in POSITION_LIST, "Unknown quad position: %s"%position self.position = position self.term = term self.md5Int = normalizeValue(term,term2Letter(term)) self.termType = term2Letter(term) def EscapeQuotes(self,qstr): """ Ported from Ft.Lib.DbUtil """ if qstr is None: return '' tmp = qstr.replace("\\","\\\\") tmp = tmp.replace("'", "\\'") return tmp def normalizeTerm(self): if isinstance(self.term,(QuotedGraph,Graph)): return self.term.identifier.encode('utf-8') elif isinstance(self.term,Literal): return self.EscapeQuotes(self.term).encode('utf-8') elif self.term is None or isinstance(self.term,(list,REGEXTerm)): return self.term else: return self.term.encode('utf-8')rdflib-2.4.2/rdflib/store/FOPLRelationalModel/RelationalHash.py0000644000175000017500000002512611153616032023331 0ustar nachonacho""" This module implements two hash tables for identifiers and values that facilitate maximal index lookups and minimal redundancy (since identifiers and values are stored once only and referred to by integer half-md5-hashes). 
The identifier hash uses the half-md5-hash (converted by base conversion to an integer) to key on the identifier's full lexical form (for partial matching by REGEX) and their term types. The use of a half-hash introduces a collision risk that is currently not accounted for. The volume at which the risk becomes significant is calculable, though through the 'birthday paradox'. The value hash is keyed off the half-md5-hash (as an integer also) and stores the identifier's full lexical representation (for partial matching by REGEX) These classes are meant to automate the creation, management, linking, insertion of these hashes (by SQL) automatically see: http://en.wikipedia.org/wiki/Birthday_Paradox """ from rdflib import BNode from rdflib import RDF from rdflib.Literal import Literal from rdflib.URIRef import URIRef from rdflib.term_utils import * from rdflib.Graph import QuotedGraph from rdflib.store.REGEXMatching import REGEXTerm from QuadSlot import POSITION_LIST, normalizeValue Any = None COLLISION_DETECTION = False REGEX_IDX = False CREATE_HASH_TABLE = """ CREATE TABLE %s ( %s ) ENGINE=InnoDB;""" IDENTIFIER_GARBAGE_COLLECTION_SQL="CREATE TEMPORARY TABLE danglingIds SELECT %s.%s FROM %s %s where %s and %s.%s <> %s;" VALUE_GARBAGE_COLLECTION_SQL="CREATE TEMPORARY TABLE danglingIds SELECT %s.%s FROM %s %s where %s" PURGE_KEY_SQL="DELETE %s FROM %s INNER JOIN danglingIds on danglingIds.%s = %s.%s;" def GarbageCollectionQUERY(idHash,valueHash,aBoxPart,binRelPart,litPart): """ Performs garbage collection on interned identifiers and their references. Joins the given KB parititions against the identifiers and values and removes the 'danglers'. 
This must be performed after every removal of an assertion and so becomes a primary bottleneck """ purgeQueries = ["drop temporary table if exists danglingIds"] rdfTypeInt = normalizeValue(RDF.type,'U') idHashKeyName = idHash.columns[0][0] valueHashKeyName = valueHash.columns[0][0] idHashJoinees = [aBoxPart,binRelPart,litPart] idJoinClauses = [] idJoinColumnCandidates = [] explicitJoins = [] for part in idHashJoinees: partJoinClauses = [] for colName in part.columnNames: if part.columnNames.index(colName) >= 4: colName,sqlType,index = colName if sqlType.lower()[:6]=='bigint': partJoinClauses.append("%s.%s = %s.%s"%(part,colName,idHash,idHashKeyName)) idJoinColumnCandidates.append("%s.%s"%(part,colName)) elif colName: partJoinClauses.append("%s.%s = %s.%s"%(part,colName,idHash,idHashKeyName)) idJoinColumnCandidates.append("%s.%s"%(part,colName)) explicitJoins.append("left join %s on (%s)"%(part,' or '.join(partJoinClauses))) idJoinClauses.extend(partJoinClauses) intersectionClause = " and ".join([col + " is NULL" for col in idJoinColumnCandidates]) idGCQuery = IDENTIFIER_GARBAGE_COLLECTION_SQL%( idHash, idHashKeyName, idHash, ' '.join(explicitJoins), intersectionClause, idHash, idHashKeyName, rdfTypeInt ) idPurgeQuery = PURGE_KEY_SQL%(idHash,idHash,idHashKeyName,idHash,idHashKeyName) purgeQueries.append(idGCQuery) purgeQueries.append(idPurgeQuery) partJoinClauses = [] idJoinColumnCandidates = [] explicitJoins = [] partJoinClauses.append("%s.%s = %s.%s"%(litPart,litPart.columnNames[OBJECT],valueHash,valueHashKeyName)) idJoinColumnCandidates.append("%s.%s"%(litPart,litPart.columnNames[OBJECT])) intersectionClause = " and ".join([col + " is NULL" for col in idJoinColumnCandidates]) valueGCQuery = VALUE_GARBAGE_COLLECTION_SQL%( valueHash, valueHashKeyName, valueHash, "left join %s on (%s)"%(litPart,' or '.join(partJoinClauses)), intersectionClause ) valuePurgeQuery = PURGE_KEY_SQL%(valueHash,valueHash,valueHashKeyName,valueHash,valueHashKeyName) 
purgeQueries.append("drop temporary table if exists danglingIds") purgeQueries.append(valueGCQuery) purgeQueries.append(valuePurgeQuery) return purgeQueries class RelationalHash: def __init__(self,identifier): self.identifier = identifier self.hashUpdateQueue = {} def defaultSQL(self): return '' def EscapeQuotes(self,qstr): if qstr is None: return '' tmp = qstr.replace("\\","\\\\") tmp = tmp.replace("'", "\\'") return tmp def normalizeTerm(self,term): if isinstance(term,(QuotedGraph,Graph)): return term.identifier.encode('utf-8') elif isinstance(term,Literal): return self.EscapeQuotes(term).encode('utf-8') elif term is None or isinstance(term,(list,REGEXTerm)): return term else: return term.encode('utf-8') def __repr__(self): return "%s_%s"%(self.identifier,self.tableNameSuffix) def IndexManagementSQL(self,create=False): idxSQLStmts = []#'ALTER TABLE %s DROP PRIMARY KEY'%self] for colName,colType,indexMD in self.columns: if indexMD: indexName,indexCol = indexMD if indexName: if create: idxSQLStmts.append("create INDEX %s on %s (%s)"%(indexName,self,indexCol)) else: idxSQLStmts.append("drop INDEX %s on %s"%(indexName,self)) return idxSQLStmts def createSQL(self): columnSQLStmts = [] for colName,colType,indexMD in self.columns: if indexMD: indexName,indexCol = indexMD if indexName: columnSQLStmts.append("\t%s\t%s not NULL"%(colName,colType)) columnSQLStmts.append("\tINDEX %s (%s)"%(indexName,indexCol)) else: columnSQLStmts.append("\t%s\t%s not NULL PRIMARY KEY"%(colName,colType)) else: columnSQLStmts.append("\t%s\t%s not NULL"%(colName,colType)) return CREATE_HASH_TABLE%( self, ',\n'.join(columnSQLStmts) ) def dropSQL(self): pass class IdentifierHash(RelationalHash): columns = [ ('id','BIGINT unsigned',[None,'id']), ('term_type',"enum('U','B','F','V','L')",['termTypeIndex','term_type']), ] tableNameSuffix = 'identifiers' def viewUnionSelectExpression(self,relations_only=False): return "select * from %s"%(repr(self)) def defaultSQL(self): """ Since rdf:type is modeled 
explicitely (in the ABOX partition) it must be inserted as a 'default' identifier """ return 'INSERT into %s values (%s,"U","%s");'%(self,normalizeValue(RDF.type,'U'),RDF.type) def generateDict(self,db): c=db.cursor() c.execute("select * from %s"%self) rtDict = {} for rt in c.fetchall(): rtDict[rt[0]] = (rt[1],rt[2]) c.close() return rtDict def updateIdentifierQueue(self,termList): for term,termType in termList: md5Int = normalizeValue(term,termType) self.hashUpdateQueue[md5Int]=(termType,self.normalizeTerm(term)) def insertIdentifiers(self,db): c=db.cursor() keyCol = self.columns[0][0] if self.hashUpdateQueue: params = [(md5Int,termType,lexical) for md5Int,(termType,lexical) in self.hashUpdateQueue.items()] c.executemany("INSERT IGNORE INTO %s"%(self)+" VALUES (%s,%s,%s)",params) if COLLISION_DETECTION: insertedIds = self.hashUpdateQueue.keys() if len(insertedIds) > 1: c.execute("SELECT * FROM %s"%(self)+" WHERE %s"%keyCol+" in %s",(tuple(insertedIds),)) else: c.execute("SELECT * FROM %s"%(self)+" WHERE %s"%keyCol+" = %s",tuple(insertedIds)) for key,termType,lexical in c.fetchall(): if self.hashUpdateQueue[key] != (termType,lexical): #Collision!!! 
Raise an exception (allow the app to rollback the transaction if it wants to) raise Exception("Hash Collision (in %s) on %s,%s vs %s,%s!"%(self,termType,lexical,self.hashUpdateQueue[key][0],self.hashUpdateQueue[key][1])) self.hashUpdateQueue = {} c.close() class LiteralHash(RelationalHash): columns = [('id','BIGINT unsigned',[None,'id']),] tableNameSuffix = 'literals' def viewUnionSelectExpression(self,relations_only=False): return "select %s, 'L' as term_type, lexical from %s"%(self.columns[0][0],repr(self)) def generateDict(self,db): c=db.cursor() c.execute("select * from %s"%self) rtDict = {} for rt in c.fetchall(): rtDict[rt[0]] = rt[1] c.close() return rtDict def updateIdentifierQueue(self,termList): for term,termType in termList: md5Int = normalizeValue(term,termType) self.hashUpdateQueue[md5Int]=self.normalizeTerm(term) def insertIdentifiers(self,db): c=db.cursor() keyCol = self.columns[0][0] if self.hashUpdateQueue: params = [(md5Int,lexical) for md5Int,lexical in self.hashUpdateQueue.items()] c.executemany("INSERT IGNORE INTO %s"%(self)+" VALUES (%s,%s)",params) if COLLISION_DETECTION: insertedIds = self.hashUpdateQueue.keys() if len(insertedIds) > 1: c.execute("SELECT * FROM %s"%(self)+" WHERE %s"%keyCol+" in %s",(tuple(insertedIds),)) else: c.execute("SELECT * FROM %s"%(self)+" WHERE %s"%keyCol+" = %s",tuple(insertedIds)) for key,lexical in c.fetchall(): if self.hashUpdateQueue[key] != lexical: #Collision!!! 
Raise an exception (allow the app to rollback the transaction if it wants to) raise Exception("Hash Collision (in %s) on %s vs %s!"%(self,lexical,self.hashUpdateQueue[key][0])) self.hashUpdateQueue = {} c.close() if REGEX_IDX: LiteralHash.columns.append(('lexical','text',['lexicalIndex','lexical(100)']),) IdentifierHash.columns.append(('lexical','text',['lexical_index','lexical(100)'])) else: LiteralHash.columns.append(('lexical','text',None)) IdentifierHash.columns.append(('lexical','text',None))rdflib-2.4.2/rdflib/store/FOPLRelationalModel/__init__.py0000644000175000017500000000000011153616032022152 0ustar nachonachordflib-2.4.2/rdflib/store/FOPLRelationalModel/BinaryRelationPartition.py0000644000175000017500000010677611153616032025262 0ustar nachonacho""" The set of classes used to model the 3 'partitions' for N3 assertions. There is a top level class which implements operations common to all partitions as well as a class for each partition. These classes are meant to allow the underlying SQL schema to be completely configurable as well as to automate the generation of SQL queries for adding,updating,removing,resolving triples from the partitions. 
These classes work in tandem with the RelationHashes to automate all (or most) of the SQL processing associated with this FOPL Relational Model NOTE: The use of foreign keys (which - unfortunately - bumps the minimum MySQL version to 5.0) allows for the efficient removal of all statements about a particular resource using cascade on delete (currently not used) see: http://dev.mysql.com/doc/refman/5.0/en/ansi-diff-foreign-keys.html """ from rdflib.URIRef import URIRef from rdflib import BNode from rdflib import RDF from rdflib.Literal import Literal from rdflib.URIRef import URIRef from pprint import pprint from rdflib.term_utils import * from rdflib.store.REGEXMatching import REGEXTerm from QuadSlot import * Any = None EXPLAIN_INFO = False CONTEXT_COLUMN = 'context' ANY_TERM = ['U','B','F','V','L'] CONTEXT_TERMS = ['U','B','F'] IDENTIFIER_TERMS = ['U','B'] GROUND_IDENTIFIERS = ['U'] NON_LITERALS = ['U','B','F','V'] CLASS_TERMS = ['U','B','V'] PREDICATE_NAMES = ['U','V'] NAMED_BINARY_RELATION_PREDICATES = GROUND_IDENTIFIERS NAMED_BINARY_RELATION_OBJECTS = ['U','B','L'] NAMED_LITERAL_PREDICATES = GROUND_IDENTIFIERS NAMED_LITERAL_OBJECTS = ['L'] ASSOCIATIVE_BOX_CLASSES = GROUND_IDENTIFIERS CREATE_BRP_TABLE = """ CREATE TABLE %s ( %s ) ENGINE=InnoDB""" LOOKUP_INTERSECTION_SQL = "INNER JOIN %s %s ON (%s)" LOOKUP_UNION_SQL = "LEFT JOIN %s %s ON (%s)" class BinaryRelationPartition(object): """ The common ancestor of the three partitions for assertions. Implements behavior common to all 3. 
Each subclass is expected to define the following: nameSuffix - The suffix appended to the name of the table termEnumerations - a 4 item list (for each quad 'slot') of lists (or None) which enumerate the allowable term types for each quad slot (one of 'U' - URIs,'V' - Variable,'L' - Literals,'B' - BNodes,'F' - Formulae) columnNames - a list of column names for each quad slot (can be of additional length where each item is a 3-item tuple of: column name, column type, index) columnIntersectionList - a list of 2 item tuples (the quad index and a boolean indicating whether or not the associated term is an identifier) this list (the order of which is very important) is used for generating intersections between the partition and the identifier / value hash hardCodedResultFields - a dictionary mapping quad slot indices to their hardcoded value (for partitions - such as ABOX - which have a hardcoded value for a particular quad slot) hardCodedResultTermsTypes - a dictionary mapping quad slot indices to their hardcoded term type (for partitions - such as Literal properties - which have hardcoded values for a particular quad slot's term type) """ assertedColumnName = 'asserted' indexSuffix = 'Index' literalTable = False objectPropertyTable = False def __init__(self,identifier,idHash,valueHash): self.identifier = identifier self.idHash = idHash self.valueHash = valueHash self._repr = self.identifier+'_'+self.nameSuffix self.singularInsertionSQLCmd = self.insertRelationsSQLCMD() self._resetPendingInsertions() self._intersectionSQL = self.generateHashIntersections() self._selectFieldsLeading = self._selectFields(True) + ['NULL as '+SlotPrefixes[DATATYPE_INDEX],'NULL as '+SlotPrefixes[LANGUAGE_INDEX]] self._selectFieldsNonLeading = self._selectFields(False) + ['NULL','NULL'] def __repr__(self): return self._repr def foreignKeySQL(self,slot): """ Generates foreign key expression relating a particular quad term with the identifier hash """ rt = ["\tCONSTRAINT %s_%s_lookup FOREIGN 
KEY (%s) REFERENCES %s (%s)"%( self, self.columnNames[slot], self.columnNames[slot], self.idHash, self.idHash.columns[0][0])] return rt def IndexManagementSQL(self,ignoreFK=False,create=False): idxSQLStmts = [] for slot in POSITION_LIST: if self.columnNames[slot]: if create: idxSQLStmts.append("create INDEX %s%s on %s (%s)"%(self.columnNames[slot],self.indexSuffix,self,self.columnNames[slot])) if not ignoreFK: idxSQLStmts.append("ALTER TABLE %s ADD %s"%(self,self.foreignKeySQL(slot)[0])) else: if not ignoreFK: idxSQLStmts.append("ALTER TABLE %s DROP FOREIGN KEY %s_%s_lookup"%(self,self,self.columnNames[slot])) idxSQLStmts.append("ALTER TABLE %s DROP INDEX %s%s"%(self,self.columnNames[slot],self.indexSuffix)) if self.termEnumerations[slot]: if create: idxSQLStmts.append("create INDEX %s_term%s on %s (%s_term)"%(self.columnNames[slot],self.indexSuffix,self,self.columnNames[slot])) else: idxSQLStmts.append("drop index %s_term%s on %s"%(self.columnNames[slot],self.indexSuffix,self)) if len(self.columnNames) > 4: for otherSlot in range(4,len(self.columnNames)): colMD = self.columnNames[otherSlot] if isinstance(colMD,tuple): colName,colType,indexStr = colMD if create: idxSQLStmts.append("create INDEX %s%s on %s (%s)"%(colName,self.indexSuffix,self,indexStr%colName)) else: idxSQLStmts.append("drop index %s%s on %s"%(colName,self.indexSuffix,self)) else: if create: idxSQLStmts.append("create INDEX %s%s on (%s)"%(colMD,self.indexSuffix,self,colMD)) if not ignoreFK: idxSQLStmts.append("ALTER TABLE %s ADD %s"%(self,self.foreignKeySQL(otherSlot)[0])) else: if not ignoreFK: idxSQLStmts.append("ALTER TABLE %s DROP FOREIGN KEY %s_%s_lookup"%(self,self,colMD)) idxSQLStmts.append("drop index %s%s on %s"%(colMD,self.indexSuffix,self)) return idxSQLStmts def createSQL(self): """ Generates a CREATE TABLE statement which creates a SQL table used for persisting assertions associated with this partition """ columnSQLStmts = [] for slot in POSITION_LIST: if self.columnNames[slot]: 
columnSQLStmts.append("\t%s\tBIGINT unsigned not NULL"%(self.columnNames[slot])) columnSQLStmts.append("\tINDEX %s%s (%s)"%(self.columnNames[slot],self.indexSuffix,self.columnNames[slot])) if self.termEnumerations[slot]: columnSQLStmts.append("\t%s_term enum(%s) not NULL"%(self.columnNames[slot],','.join(["'%s'"%tType for tType in self.termEnumerations[slot]]))) columnSQLStmts.append("\tINDEX %s_term%s (%s_term)"%(self.columnNames[slot],self.indexSuffix,self.columnNames[slot])) columnSQLStmts.extend(self.foreignKeySQL(slot)) if len(self.columnNames) > 4: for otherSlot in range(4,len(self.columnNames)): colMD = self.columnNames[otherSlot] if isinstance(colMD,tuple): colName,colType,indexStr = colMD columnSQLStmts.append("\t%s %s"%(colName,colType)) columnSQLStmts.append("\tINDEX %s%s (%s)"%(colName,self.indexSuffix,indexStr%colName)) else: columnSQLStmts.append("\t%s BIGINT unsigned not NULL"%colMD) columnSQLStmts.append("\tINDEX %s%s (%s)"%(colMD,self.indexSuffix,colMD)) columnSQLStmts.extend(self.foreignKeySQL(otherSlot)) return CREATE_BRP_TABLE%( self, ',\n'.join(columnSQLStmts) ) def _resetPendingInsertions(self): """ Resets the cache for pending insertions """ self.pendingInsertions = [] def insertRelationsSQLCMD(self): """ Generates a SQL command with parameter references (%s) in order to facilitate efficient batch insertion of multiple assertions by Python DB implementations (such as MySQLdb) """ vals = 0 insertColNames = [] for colName in self.columnNames: colIdx = self.columnNames.index(colName) if colName: insertColNames.append(colName) vals += 1 if colIdx < len(self.termEnumerations) and self.termEnumerations[colIdx]: insertColNames.append(colName+'_term') vals += 1 insertColsExpr = "(%s)"%(','.join([isinstance(i,tuple) and i[0] or i for i in insertColNames])) return "INSERT INTO %s %s VALUES "%(self,insertColsExpr)+"(%s)"%(','.join(['%s' for i in range(vals)])) def insertRelations(self,quadSlots): """ Takes a list of QuadSlot objects and queues the new 
identifiers / values to insert and the assertions as well (so they can be added in a batch for maximum efficiency) """ for quadSlot in quadSlots: self.extractIdentifiers(quadSlot) self.pendingInsertions.append(self.compileQuadToParams(quadSlot)) def flushInsertions(self,db): """ Adds the pending identifiers / values and assertions (using executemany for maximum efficiency), and resets the queue. """ self.idHash.insertIdentifiers(db) self.valueHash.insertIdentifiers(db) cursor = db.cursor() cursor.executemany(self.singularInsertionSQLCmd,self.pendingInsertions) cursor.close() self._resetPendingInsertions() def viewUnionSelectExpression(self,relations_only=False): """ Return a SQL statement which creates a view of all the RDF statements from all the contributing partitions """ rt=[] if relations_only and self.objectPropertyTable: return "select * from %s"%repr(self) for idx in range(len(POSITION_LIST)): rdfTermLabel=SlotPrefixes[idx] if idx < len(self.columnNames) and self.columnNames[idx]: #there is a matching column rt.append(self.columnNames[idx]+' as %s'%rdfTermLabel) if self.termEnumerations[idx]: #there is a corresponding term enumeration rt.append(self.columnNames[idx]+'_term as %s_term'%rdfTermLabel) else: #no corresponding term enumeration (hardcoded) rt.append("'%s' as %s_term"%(self.hardCodedResultTermsTypes[idx], rdfTermLabel)) else: assert self.hardCodedResultFields[idx] == RDF.type rt.append("'%s' as %s"%(normalizeValue(self.hardCodedResultFields[idx],'U'), rdfTermLabel)) if self.hardCodedResultTermsTypes[idx]: rt.append("'%s' as %s_term"%(self.hardCodedResultTermsTypes[idx], rdfTermLabel)) if not relations_only: if self.literalTable: for i in self.columnNames[-2:]: rt.append(i[0]) else: rt.append('NULL as data_type') rt.append('NULL as language') return "select %s from %s"%(', '.join(rt),repr(self)) def selectContextFields(self,first): """ Generates a list of column aliases for the SELECT SQL command used in order to fetch contexts from each partition 
""" rt = [] idHashLexicalCol = self.idHash.columns[-1][0] idHashTermTypeCol = self.idHash.columns[-2][0] termNameAlias = first and ' as %s'%SlotPrefixes[CONTEXT] or '' rt.append('rt_'+SlotPrefixes[CONTEXT]+'.'+idHashLexicalCol + termNameAlias) termTypeAlias = first and ' as %sTermType'%SlotPrefixes[CONTEXT] or '' if self.termEnumerations[CONTEXT]: rt.append('rt_'+SlotPrefixes[CONTEXT]+'.'+idHashTermTypeCol+termTypeAlias) else: rt.append("'%s'"%self.hardCodedResultTermsTypes[CONTEXT]+termTypeAlias) return rt def _selectFields(self,first): rt = [] idHashLexicalCol = self.idHash.columns[-1][0] idHashTermTypeCol = self.idHash.columns[-2][0] for idx in range(len(POSITION_LIST)): termNameAlias = first and ' as %s'%SlotPrefixes[idx] or '' if idx < len(self.columnNames) and self.columnNames[idx]: rt.append('rt_'+SlotPrefixes[idx]+'.'+idHashLexicalCol + termNameAlias) termTypeAlias = first and ' as %sTermType'%SlotPrefixes[idx] or '' if self.termEnumerations[idx]: rt.append('rt_'+SlotPrefixes[idx]+'.'+idHashTermTypeCol+termTypeAlias) else: rt.append("'%s'"%self.hardCodedResultTermsTypes[idx]+termTypeAlias) else: rt.append("'%s'"%self.hardCodedResultFields[idx]+termNameAlias) if self.hardCodedResultTermsTypes[idx]: rt.append("'%s'"%self.hardCodedResultTermsTypes[idx]+termNameAlias) return rt def selectFields(self,first=False): """ Returns a list of column aliases for the SELECT SQL command used to fetch quads from a partition """ return first and self._selectFieldsLeading or self._selectFieldsNonLeading def generateHashIntersections(self): """ Generates the SQL JOINS (INNER and LEFT) used to intersect the identifier and value hashes with this partition. This relies on each parition setting up an ordered list of intersections (ordered with optimization in mind). 
For instance the ABOX partition would want to intersect on classes first (since this will have a lower cardinality than any other field) wherease the Literal Properties partition would want to intersect on datatypes first. The paritions and hashes are joined on the integer half-MD5-hash of the URI (or literal) as well as the 'Term Type' """ intersections = [] for idx,isId in self.columnIntersectionList: lookup = isId and self.idHash or self.valueHash lookupAlias = idx < len(POSITION_LIST) and 'rt_'+SlotPrefixes[idx] or 'rt_'+self.columnNames[idx][0] lookupKeyCol = lookup.columns[0][0] if idx < len(POSITION_LIST) or len(self.columnNames) > len(POSITION_LIST): colName = idx < len(POSITION_LIST) and self.columnNames[idx] or self.columnNames[idx][0] intersectionClauses = ["%s.%s = %s.%s"%(self,colName,lookupAlias,lookupKeyCol)] if idx < len(POSITION_LIST) and self.termEnumerations[idx]: intersectionClauses.append("%s.%s_term = %s.%s"%(self,colName,lookupAlias,lookup.columns[1][0])) if isId and idx < len(POSITION_LIST) and idx in self.hardCodedResultTermsTypes: intersectionClauses.append("%s.%s = '%s'"%(lookupAlias,lookup.columns[1][0],self.hardCodedResultTermsTypes[idx])) if idx == DATATYPE_INDEX and len(self.columnNames) > len(POSITION_LIST): intersections.append(LOOKUP_UNION_SQL%(lookup,lookupAlias,' AND '.join(intersectionClauses))) else: intersections.append(LOOKUP_INTERSECTION_SQL%(lookup,lookupAlias,' AND '.join(intersectionClauses))) return ' '.join(intersections) def generateWhereClause(self,queryPattern): """ Takes a query pattern (a list of quad terms - subject,predicate,object,context) and generates a SQL WHERE clauses which works in conjunction to the intersections to filter the result set by partial matching (by REGEX), full matching (by integer half-hash), and term types. 
For maximally efficient SELECT queries """ whereClauses = [] whereParameters = [] asserted = dereferenceQuad(CONTEXT,queryPattern) is None for idx in SlotPrefixes.keys(): queryTerm = dereferenceQuad(idx,queryPattern) lookupAlias = 'rt_'+SlotPrefixes[idx] if idx == CONTEXT and asserted: whereClauses.append("%s.%s_term != 'F'"%(self,self.columnNames[idx])) if idx < len(POSITION_LIST) and isinstance(queryTerm,REGEXTerm): whereClauses.append("%s.lexical REGEXP "%lookupAlias+"%s") whereParameters.append(queryTerm) elif idx == CONTEXT and isinstance(queryTerm,Graph) and isinstance(queryTerm.identifier,REGEXTerm): whereClauses.append("%s.lexical REGEXP "%lookupAlias+"%s") whereParameters.append(queryTerm.identifier) elif idx < len(POSITION_LIST) and queryTerm is not Any: if self.columnNames[idx]: if isinstance(queryTerm,list): whereClauses.append("%s.%s"%(self,self.columnNames[idx])+" in (%s)"%','.join(['%s' for item in range(len(queryTerm))])) whereParameters.extend([normalizeValue(item,term2Letter(item)) for item in queryTerm]) else: whereClauses.append("%s.%s"%(self,self.columnNames[idx])+" = %s") whereParameters.append(normalizeValue(queryTerm,term2Letter(queryTerm))) if not idx in self.hardCodedResultTermsTypes and self.termEnumerations[idx] and not isinstance(queryTerm,list): whereClauses.append("%s.%s_term"%(self,self.columnNames[idx])+" = %s") whereParameters.append(term2Letter(queryTerm)) elif idx >= len(POSITION_LIST) and len(self.columnNames) > len(POSITION_LIST) and queryTerm is not None: compVal = idx == DATATYPE_INDEX and normalizeValue(queryTerm,term2Letter(queryTerm)) or queryTerm whereClauses.append("%s.%s"%(self,self.columnNames[idx][0])+" = %s") whereParameters.append(compVal) return ' AND '.join(whereClauses),whereParameters# + "#{%s}\n"%(str(queryPattern)),whereParameters class AssociativeBox(BinaryRelationPartition): """ The partition associated with assertions of class membership (formally known - in Description Logics - as an Associative Box) This 
    partition is for all assertions where the property is rdf:type
    see: http://en.wikipedia.org/wiki/Description_Logic#Modelling_in_Description_Logics
    """
    nameSuffix = 'associativeBox'
    # member / class columns; the predicate column is omitted (implicitly rdf:type).
    termEnumerations=[NON_LITERALS,None,CLASS_TERMS,CONTEXT_TERMS]
    columnNames = ['member',None,'class',CONTEXT_COLUMN]
    # Intersect on class first (lowest cardinality), then context, then member.
    columnIntersectionList = [
        (OBJECT,True),
        (CONTEXT,True),
        (SUBJECT,True)]
    hardCodedResultFields = {
        PREDICATE : RDF.type,
    }
    hardCodedResultTermsTypes = {
        PREDICATE : 'U',
    }

    def compileQuadToParams(self,quadSlots):
        """Flatten quad slots to INSERT parameters (predicate is implicit)."""
        subjSlot,predSlot,objSlot,conSlot = quadSlots
        return (subjSlot.md5Int,
                term2Letter(subjSlot.term),
                objSlot.md5Int,
                term2Letter(objSlot.term),
                conSlot.md5Int,
                term2Letter(conSlot.term))

    def extractIdentifiers(self,quadSlots):
        """Queue subject/object/context for the identifier hash (no predicate)."""
        subjSlot,predSlot,objSlot,conSlot = quadSlots
        self.idHash.updateIdentifierQueue([
            (subjSlot.term,subjSlot.termType),
            (objSlot.term,objSlot.termType),
            (conSlot.term,conSlot.termType)
        ])

class NamedLiteralProperties(BinaryRelationPartition):
    """
    The partition associated with assertions where the object is a Literal.
""" nameSuffix = 'literalProperties' termEnumerations=[NON_LITERALS,PREDICATE_NAMES,None,CONTEXT_TERMS] columnNames = ['subject','predicate','object',CONTEXT_COLUMN,('data_type','BIGINT unsigned','%s'),('language','varchar(3)','%s(3)')] columnIntersectionList = [ (DATATYPE_INDEX,True), (PREDICATE,True), (CONTEXT,True), (OBJECT,False), (SUBJECT,True)] hardCodedResultFields = {} hardCodedResultTermsTypes = { OBJECT : 'L' } literalTable = True def foreignKeySQL(self,slot): hash = slot == OBJECT and self.valueHash or self.idHash rt = ["\tCONSTRAINT %s_%s_lookup FOREIGN KEY (%s) REFERENCES %s (%s)"%( self, self.columnNames[slot], self.columnNames[slot], hash, hash.columns[0][0])] return rt def __init__(self,identifier,idHash,valueHash): super(NamedLiteralProperties,self).__init__(identifier,idHash,valueHash) self.insertSQLCmds = { (False,False): self.insertRelationsSQLCMD(), (False,True) : self.insertRelationsSQLCMD(language=True), (True,False) : self.insertRelationsSQLCMD(dataType=True), (True,True) : self.insertRelationsSQLCMD(dataType=True,language=True) } idHashLexicalCol = self.idHash.columns[-1][0] self._selectFieldsLeading = self._selectFields(True) + \ [ 'rt_%s.%s'%(self.columnNames[DATATYPE_INDEX][0],idHashLexicalCol) + ' as %s'%SlotPrefixes[DATATYPE_INDEX], str(self)+'.'+self.columnNames[LANGUAGE_INDEX][0]+' as %s'%SlotPrefixes[LANGUAGE_INDEX], ] self._selectFields = self._selectFields(False) + \ [ 'rt_%s.%s'%(self.columnNames[DATATYPE_INDEX][0],idHashLexicalCol), str(self)+'.'+self.columnNames[LANGUAGE_INDEX][0], ] def _resetPendingInsertions(self): self.pendingInsertions = { (False,False): [], (False,True) : [], (True,False) : [], (True,True) : [], } def insertRelationsSQLCMD(self,dataType=None,language=None): vals = 0 insertColNames = [] for colName in self.columnNames: colIdx = self.columnNames.index(colName) if colName: if isinstance(colName,tuple): colName = colName[0] for argColName,arg in 
[(self.columnNames[DATATYPE_INDEX][0],dataType),(self.columnNames[LANGUAGE_INDEX][0],language)]: if colName == argColName and arg: insertColNames.append(colName) vals += 1 else: insertColNames.append(colName) vals += 1 if colIdx < len(self.termEnumerations) and self.termEnumerations[colIdx]: insertColNames.append(colName+'_term') vals += 1 insertColsExpr = "(%s)"%(','.join([i for i in insertColNames])) return "INSERT INTO %s %s VALUES "%(self,insertColsExpr)+"(%s)"%(','.join(['%s' for i in range(vals)])) def insertRelations(self,quadSlots): for quadSlot in quadSlots: self.extractIdentifiers(quadSlot) literal = quadSlot[OBJECT].term insertionCMDKey = (bool(literal.datatype),bool(literal.language)) self.pendingInsertions[insertionCMDKey].append(self.compileQuadToParams(quadSlot)) def flushInsertions(self,db): self.idHash.insertIdentifiers(db) self.valueHash.insertIdentifiers(db) cursor = db.cursor() for key,paramList in self.pendingInsertions.items(): if paramList: cursor.executemany(self.insertSQLCmds[key],paramList) cursor.close() self._resetPendingInsertions() def compileQuadToParams(self,quadSlots): subjSlot,predSlot,objSlot,conSlot = quadSlots dTypeParam = objSlot.term.datatype and normalizeValue(objSlot.term.datatype,'U') or None langParam = objSlot.term.language and objSlot.term.language or None rtList = [ subjSlot.md5Int, term2Letter(subjSlot.term), predSlot.md5Int, term2Letter(predSlot.term), objSlot.md5Int, conSlot.md5Int, term2Letter(conSlot.term)] for item in [dTypeParam,langParam]: if item: rtList.append(item) return tuple(rtList) def extractIdentifiers(self,quadSlots): """ Test literal data type extraction >>> from rdflib import RDF >>> class DummyClass: ... def __init__(self,test=False): ... self.test = test ... def updateIdentifierQueue(self,stuff): ... if self.test: ... term,termType = stuff[-1] ... assert termType == 'U',"Datatype's are URIs!" >>> class Tester(NamedLiteralProperties): ... def __init__(self): ... self.idHash = DummyClass(True) ... 
self.valueHash = DummyClass() >>> c = Tester() >>> slots = genQuadSlots([BNode(),RDF.first,Literal(1),BNode()]) >>> c.extractIdentifiers(slots) """ subjSlot,predSlot,objSlot,conSlot = quadSlots idTerms = [ (subjSlot.term,subjSlot.termType), (predSlot.term,predSlot.termType), (conSlot.term,conSlot.termType)] if objSlot.term.datatype: idTerms.append((objSlot.term.datatype,term2Letter(objSlot.term.datatype))) self.idHash.updateIdentifierQueue(idTerms) self.valueHash.updateIdentifierQueue([(objSlot.term,objSlot.termType)]) def selectFields(self,first=False): return first and self._selectFieldsLeading or self._selectFieldsNonLeading class NamedBinaryRelations(BinaryRelationPartition): """ Partition associated with assertions where the predicate isn't rdf:type and the object isn't a literal """ nameSuffix = 'relations' termEnumerations=[NON_LITERALS,PREDICATE_NAMES,NON_LITERALS,CONTEXT_TERMS] columnNames = ['subject','predicate','object',CONTEXT_COLUMN] columnIntersectionList = [ (PREDICATE,True), (CONTEXT,True), (OBJECT,True), (SUBJECT,True)] hardCodedResultFields = {} hardCodedResultTermsTypes = {} objectPropertyTable = True def compileQuadToParams(self,quadSlots): subjSlot,predSlot,objSlot,conSlot = quadSlots return (subjSlot.md5Int, term2Letter(subjSlot.term), predSlot.md5Int, term2Letter(predSlot.term), objSlot.md5Int, term2Letter(objSlot.term), conSlot.md5Int, term2Letter(conSlot.term)) def extractIdentifiers(self,quadSlots): subjSlot,predSlot,objSlot,conSlot = quadSlots self.idHash.updateIdentifierQueue([ (subjSlot.term,subjSlot.termType), (predSlot.term,predSlot.termType), (objSlot.term,objSlot.termType), (conSlot.term,conSlot.termType)]) def BinaryRelationPartitionCoverage((subject,predicate,object_,context),BRPs): """ This function takes a quad pattern (where any term is one of: URIRef,BNode,Literal,None,or REGEXTerm) ,a list of 3 live partitions and returns a list of only those partitions that need to be searched in order to resolve the pattern. 
This function relies on the BRPQueryDecisionMap dictionary to determine which partitions to use. Note that the dictionary as it is currently constituted requres that REGEXTerms in the object slot require that *both* the binary relation partition and the literal properties partitions are searched when this search could be limited to the literal properties only (for more efficient REGEX evaluation of literal values). Given the nature of the REGEX function in SPARQL and the way Versa matches by REGEX, this seperation couldn't be done """ if isinstance(predicate,list) and len(predicate) == 1: predicate = predicate[0] if isinstance(predicate,REGEXTerm): pId = predicate.compiledExpr.match(RDF.type) and 'RT' or 'U_RNT' elif isinstance(predicate,(URIRef,BNode)): pId = predicate == RDF.type and 'T' or 'U_RNT' elif predicate is None or predicate is []: pId = 'W' elif isinstance(predicate,list): if [p for p in predicate if p == RDF.type or isinstance(p,REGEXTerm) and p.compiledExpr.match(RDF.type)]: #One of the predicates is (or matches) rdf:type, so can be treated as a REGEX term that matches rdf:type pId = 'RT' else: #Otherwise, can be treated as a REGEXTerm that *doesn't* match rdf:type pId = 'U_RNT' elif isinstance(predicate,Variable): #Predicates as variables would only exist in literal property assertions and 'other' Relations partition #(same as URIs or REGEX Terms that don't match rdf:type) pId = 'U_RNT' else: raise Exception("Unable to determine a parition to cover with the given predicate %s (a %s)"%(predicate,type(predicate).__name__)) if isinstance(object_,list) and len(object_) == 1: object_ = object_[0] if isinstance(object_,REGEXTerm): oId = 'R' elif isinstance(object_,Literal): oId = 'L' elif isinstance(object_,(URIRef,BNode,Graph)): oId = 'U' elif object_ is None: oId = 'W' elif isinstance(object_,list): if [o for o in object_ if isinstance(o,REGEXTerm)]: #If there are any REGEXTerms in the list then the list behaves as a REGEX / Wildcard oId = 'R' elif not 
[o for o in object_ if isinstance(o,REGEXTerm) or isinstance(o,Literal)]: #There are no Literals or REGEXTerms, the list behaves as a URI (i.e., it never checks literal partition) oId = 'U' elif len([o for o in object_ if isinstance(o,Literal)]) == len(object_): #They are all literals oId = 'L' else: #Treat as a wildcard oId = 'R' elif isinstance(object_,Variable): #Variables would only exist in the ABOX and 'other' Relations partition (same as URIs) oId = 'U' else: raise Exception("Unable to determine a parition to cover with the given object %s (a %s)"%(object_,type(object_).__name__)) targetBRPs = [brp for brp in BRPs if isinstance(brp,BRPQueryDecisionMap[pId+oId])] return targetBRPs def PatternResolution(quad,cursor,BRPs,orderByTriple=True,fetchall=True,fetchContexts=False): """ This function implements query pattern resolution against a list of partition objects and 3 parameters specifying whether to sort the result set (in order to group identical triples by the contexts in which they appear), whether to fetch the entire result set or one at a time, and whether to fetch the matching contexts only or the assertions. This function uses BinaryRelationPartitionCoverage to whittle out the partitions that don't need to be searched, generateHashIntersections / generateWhereClause to generate the SQL query and the parameter fill-ins and creates a single UNION query against the relevant partitions. 
Note the use of UNION syntax requires that the literal properties partition is first (since it uses the first select to determine the column types for the resulting rows from the subsequent SELECT queries) see: http://dev.mysql.com/doc/refman/5.0/en/union.html """ subject,predicate,object_,context = quad targetBRPs = BinaryRelationPartitionCoverage((subject,predicate,object_,context),BRPs) unionQueries = [] unionQueriesParams = [] for brp in targetBRPs: first = targetBRPs.index(brp) == 0 if fetchContexts: query = "SELECT DISTINCT %s FROM %s %s WHERE "%( ','.join(brp.selectContextFields(first)), brp, brp._intersectionSQL ) else: query = CROSS_BRP_QUERY_SQL%( ','.join(brp.selectFields(first)), brp, brp._intersectionSQL ) whereClause,whereParameters = brp.generateWhereClause((subject,predicate,object_,context)) unionQueries.append(query+whereClause) unionQueriesParams.extend(whereParameters) if fetchContexts: orderBySuffix = '' else: orderBySuffix = orderByTriple and ' ORDER BY %s,%s,%s'%(SlotPrefixes[SUBJECT],SlotPrefixes[PREDICATE],SlotPrefixes[OBJECT]) or '' if len(unionQueries) == 1: query = unionQueries[0] + orderBySuffix else: query = ' union all '.join(['('+q+')' for q in unionQueries]) + orderBySuffix try: if EXPLAIN_INFO: cursor.execute("EXPLAIN "+query,tuple(unionQueriesParams)) print query from pprint import pprint;pprint(cursor.fetchall()) cursor.execute(query,tuple(unionQueriesParams)) except ValueError,e: print "## Query ##\n",query print "## Parameters ##\n",unionQueriesParams raise e if fetchall: qRT = cursor.fetchall() else: qRT = cursor.fetchone() return qRT CREATE_RESULT_TABLE = \ """ CREATE TEMPORARY TABLE result ( subject text NOT NULL, subjectTerm enum('F','V','U','B','L') NOT NULL, predicate text NOT NULL, predicateTerm enum('F','V','U','B','L') NOT NULL, object text NOT NULL, objectTerm enum('F','V','U','B','L') NOT NULL, context text not NULL, contextTerm enum('F','V','U','B','L') NOT NULL, dataType text, language char(3), INDEX USING BTREE 
(context(50)) ) """ CROSS_BRP_QUERY_SQL="SELECT STRAIGHT_JOIN %s FROM %s %s WHERE " CROSS_BRP_RESULT_QUERY_SQL="SELECT * FROM result ORDER BY context" DROP_RESULT_TABLE_SQL = "DROP result" BRPQueryDecisionMap = { 'WL':(NamedLiteralProperties), 'WU':(AssociativeBox,NamedBinaryRelations), 'WW':(NamedLiteralProperties,AssociativeBox,NamedBinaryRelations), 'WR':(NamedLiteralProperties,AssociativeBox,NamedBinaryRelations), #Could be optimized to not include NamedBinaryRelations 'RTL':(NamedLiteralProperties), 'RTU':(NamedBinaryRelations,AssociativeBox), 'RTR':(NamedLiteralProperties,AssociativeBox,NamedBinaryRelations), #Could be optimized to not include NamedBinaryRelations 'TU':(AssociativeBox), 'TW':(AssociativeBox), 'TR':(AssociativeBox), 'U_RNTL':(NamedLiteralProperties), 'U_RNTU':(NamedBinaryRelations), 'U_RNTW':(NamedLiteralProperties,NamedBinaryRelations), 'U_RNTR':(NamedLiteralProperties,NamedBinaryRelations), #Could be optimized to not include NamedBinaryRelations } def test(): import doctest doctest.testmod() if __name__ == '__main__': test() rdflib-2.4.2/rdflib/store/PostgreSQL.py0000644000175000017500000007215711165212723016712 0ustar nachonacho## Copyright (c) 2009, Intel Corporation. All rights reserved. ## Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are ## met: ## * Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## * Redistributions in binary form must reproduce the above ## copyright notice, this list of conditions and the following ## disclaimer in the documentation and/or other materials provided ## with the distribution. ## * Neither the name of Daniel Krech nor the names of its ## contributors may be used to endorse or promote products derived ## from this software without specific prior written permission. 
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from rdflib import BNode
from rdflib.Literal import Literal
import psycopg2
import sha,sys,re,os
from rdflib.term_utils import *
from rdflib.Graph import QuotedGraph
from rdflib.store import Store,VALID_STORE, CORRUPTED_STORE, NO_STORE, UNKNOWN
from rdflib.store.REGEXMatching import REGEXTerm, NATIVE_REGEX, PYTHON_REGEX
from rdflib.store.AbstractSQLStore import *

Any = None

def ParseConfigurationString(config_string):
    """
    Parses a configuration string in the form:
    key1=val1,key2=val2,key3=val3,...

    The following configuration keys are expected (not all are required):
    user
    password
    db
    host
    port (optional - defaults to 5432)

    Returns a dict of the parsed key/value pairs with 'port' and 'password'
    defaults filled in.  Raises AssertionError when 'user' or 'db' is missing.
    """
    # FIX: split each pair on the *first* '=' only.  The previous
    # part.split('=')[0] / [-1] silently dropped everything between the first
    # and last '=', corrupting values (e.g. passwords) that contain '='.
    # Values containing ',' are still unsupported (the pair separator).
    kvDict = {}
    for part in config_string.split(','):
        if '=' in part:
            key,value = part.split('=',1)
        else:
            # Preserve the historical behaviour for a '='-free segment:
            # it maps to itself.
            key = value = part
        kvDict[key] = value
    # NOTE: asserts are stripped under -O; kept for backward compatibility
    # with callers expecting AssertionError on a bad config.
    for requiredKey in ['user','db']:
        assert requiredKey in kvDict
    if 'port' not in kvDict:
        kvDict['port']=5432
    if 'password' not in kvDict:
        kvDict['password']=''
    return kvDict

def GetConfigurationString(configuration):
    """
    Given a config-form string, return a dsn-form string suitable for
    psycopg2.connect().  Raises ArithmeticError (historical choice, kept for
    compatibility) when the port is not a valid integer.
    """
    configDict = ParseConfigurationString(configuration)
    dsn = "dbname=%s user=%s password=%s" % (configDict['db'],
                                             configDict['user'],
                                             configDict['password'])
    if configDict.has_key('port'):
        try:
            port = int(configDict['port'])
            dsn += " port=%s" % port
        except (TypeError, ValueError):
            # Narrowed from a bare except: only conversion failures belong here.
            raise ArithmeticError('PostgreSQL port must be a valid integer')
    return dsn

# Though i appreciate that this was made into a function rather than
# a method since it was universal, sadly different DBs quote values
# differently.  So i have to pull this, and all methods which call it,
# into the Postgres implementation level.
# #Helper function for building union all select statement #Takes a list of: # - table name # - table alias # - table type (literal, type, asserted, quoted) # - where clause string def unionSELECT(selectComponents,distinct=False,selectType=TRIPLE_SELECT): selects = [] for tableName,tableAlias,whereClause,tableType in selectComponents: if selectType == COUNT_SELECT: selectString = "select count(*)" tableSource = " from %s "%tableName elif selectType == CONTEXT_SELECT: selectString = "select %s.context"%tableAlias tableSource = " from %s as %s "%(tableName,tableAlias) elif tableType in FULL_TRIPLE_PARTITIONS: selectString = "select *" tableSource = " from %s as %s "%(tableName,tableAlias) elif tableType == ASSERTED_TYPE_PARTITION: selectString =\ """select %s.member as subject, '%s' as predicate, %s.klass as object, %s.context as context, %s.termComb as termComb, NULL as objLanguage, NULL as objDatatype"""%(tableAlias,RDF.type,tableAlias,tableAlias,tableAlias) tableSource = " from %s as %s "%(tableName,tableAlias) elif tableType == ASSERTED_NON_TYPE_PARTITION: selectString =\ """select *,NULL as objLanguage, NULL as objDatatype""" tableSource = " from %s as %s "%(tableName,tableAlias) selects.append(selectString + tableSource + whereClause) orderStmt = '' if selectType == TRIPLE_SELECT: orderStmt = ' order by subject,predicate,object' if distinct: return ' union '.join(selects) + orderStmt else: return ' union all '.join(selects) + orderStmt class PostgreSQL(AbstractSQLStore): """ PostgreSQL store formula-aware implementation. 
    It stores its triples in the following partitions (per AbstractSQLStore):

    - Asserted non rdf:type statements
    - Asserted rdf:type statements (in a table which models Class membership)
      The motivation for this partition is primarily query speed and
      scalability as most graphs will always have more rdf:type statements
      than others
    - All Quoted statements

    In addition it persists namespace mappings in a separate table
    """
    context_aware = True
    formula_aware = True
    transaction_aware = True
    regex_matching = NATIVE_REGEX
    autocommit_default = False

    def open(self, configuration, create=True):
        """
        Opens the store specified by the configuration string.  If create is
        True a store will be created if it does not already exist.  If create
        is False and a store does not already exist an exception is raised.
        An exception is also raised if a store exists, but there is
        insufficient permissions to open the store.
        """
        self._db = psycopg2.connect(GetConfigurationString(configuration))
        if self._db:
            if create:
                self.init_db()
            if self.db_exists():
                return VALID_STORE
            else:
                # Tables are missing and we were not asked to create them.
                self._db = None
                return NO_STORE
        else:
            return NO_STORE

    def db_exists(self):
        """Return 1 if every expected partition table exists, else 0
        (reporting each missing table on stderr)."""
        c = self._db.cursor()
        c.execute("SELECT relname from pg_class")
        tbls = [rt[0] for rt in c.fetchall()]
        c.close()
        for tn in [tbl % (self._internedId) for tbl in table_name_prefixes]:
            if tn not in tbls:
                sys.stderr.write("table %s Doesn't exist\n" % (tn))
                return 0
        return 1

    def init_db(self):
        """Create all partition tables and their indices, then commit."""
        c=self._db.cursor()
        for x in CREATE_TABLE_STMTS:
            c.execute(x % (self._internedId))
        for tblName,indices in INDICES:
            for indexName,columns in indices:
                c.execute("CREATE INDEX %s on %s (%s)" % ((indexName % self._internedId),
                                                          (tblName % self._internedId),
                                                          ','.join(columns)))
        c.close()
        self._db.commit()

    # opposite of init_db, takes a config string
    def destroy(self, configuration):
        db = psycopg2.connect(GetConfigurationString(configuration))
        c = db.cursor()
        for tblname in table_name_prefixes:
            fullname = tblname % self._internedId
            try:
                c.execute("DROP TABLE %s" % fullname)
except: sys.stderr.write("unable to drop table: %s\n" % fullname) db.commit() c.close() db.close() print "Destroyed Close World Universe %s (in PostgreSQL database %s)" % (self.identifier, configuration) def EscapeQuotes(self, qstr): # overridden because PostgreSQL is in its own quoting world if qstr is None: return '' tmp = qstr.replace("'", "''") return tmp # copied and pasted primarily to use the local unionSELECT instead # of the one provided by AbstractSQLStore def triples(self, (subject, predicate, obj), context=None): """ A generator over all the triples matching pattern. Pattern can be any objects for comparing against nodes in the store, for example, RegExLiteral, Date? DateRange? quoted table: _quoted_statements asserted rdf:type table: _type_statements asserted non rdf:type table: _asserted_statements triple columns: subject,predicate,object,context,termComb,objLanguage,objDatatype class membership columns: member,klass,context termComb FIXME: These union all selects *may* be further optimized by joins """ quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId c=self._db.cursor() parameters = [] if predicate == RDF.type: #select from asserted rdf:type partition and quoted table (if a context is specified) clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,context,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate: #Select from quoted partition (if context is specified), literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and 
isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,context,True) parameters.extend(params) selects.append( ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ) ) elif predicate: #select from asserted non rdf:type partition (optionally), quoted partition (if context is speciied), and literal partition (optionally) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj,context) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj,context) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) if context is not None: clauseString,params = self.buildClause('quoted',subject,predicate, obj,context) parameters.extend(params) selects.append( ( quoted_table, 'quoted', clauseString, QUOTED_PARTITION ) ) q=self._normalizeSQLCmd(unionSELECT(selects)) self.executeSQL(c,q,parameters) rt = c.fetchone() while rt: s,p,o,(graphKlass,idKlass,graphId) = extractTriple(rt,self,context) currentContext=graphKlass(self,idKlass(graphId)) contexts = [currentContext] rt = next 
= c.fetchone() sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) while sameTriple: s2,p2,o2,(graphKlass,idKlass,graphId) = extractTriple(next,self,context) c2 = graphKlass(self,idKlass(graphId)) contexts.append(c2) rt = next = c.fetchone() sameTriple = next and extractTriple(next,self,context)[:3] == (s,p,o) yield (s,p,o),(c for c in contexts) # copied and pasted primarily to use the local unionSELECT instead # of the one provided by AbstractSQLStore def __repr__(self): c=self._db.cursor() quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId selects = [ ( asserted_type_table, 'typeTable', '', ASSERTED_TYPE_PARTITION ), ( quoted_table, 'quoted', '', QUOTED_PARTITION ), ( asserted_table, 'asserted', '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=False,selectType=COUNT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q)) rt=c.fetchall() typeLen,quotedLen,assertedLen,literalLen = [rtTuple[0] for rtTuple in rt] return ""%(len([c for c in self.contexts()]),typeLen,quotedLen,literalLen,assertedLen) # copied and pasted primarily to use the local unionSELECT instead # of the one provided by AbstractSQLStore def __len__(self, context=None): """ Number of statements in the store. 
""" c=self._db.cursor() quoted_table="%s_quoted_statements"%self._internedId asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId parameters = [] quotedContext = assertedContext = typeContext = literalContext = None clauseParts = self.buildContextClause(context,quoted_table) if clauseParts: quotedContext,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,asserted_table) if clauseParts: assertedContext,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,asserted_type_table) if clauseParts: typeContext ,params = clauseParts parameters.extend([p for p in params if p]) clauseParts = self.buildContextClause(context,literal_table) if clauseParts: literalContext,params = clauseParts parameters.extend([p for p in params if p]) if context is not None: selects = [ ( asserted_type_table, 'typeTable', typeContext and 'where ' + typeContext or '', ASSERTED_TYPE_PARTITION ), ( quoted_table, 'quoted', quotedContext and 'where ' + quotedContext or '', QUOTED_PARTITION ), ( asserted_table, 'asserted', assertedContext and 'where ' + assertedContext or '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', literalContext and 'where ' + literalContext or '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=True,selectType=COUNT_SELECT) else: selects = [ ( asserted_type_table, 'typeTable', typeContext and 'where ' + typeContext or '', ASSERTED_TYPE_PARTITION ), ( asserted_table, 'asserted', assertedContext and 'where ' + assertedContext or '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', literalContext and 'where ' + literalContext or '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=False,selectType=COUNT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q),parameters) rt=c.fetchall() c.close() return 
reduce(lambda x,y: x+y, [rtTuple[0] for rtTuple in rt]) # This is taken from AbstractSQLStore, and modified, specifically # to not query quoted_statements. The comments in the original # indicate that quoted_statements were queried conditionally, but # the code does otherwise. # # As far as i can tell, quoted_statements contains formulae, which # should not be returned as valid global contexts (at least, as per # the in-memory and MySQL store implementations), so those queries # have been completely excised until a case is made that they are # necessary. # # It's reasonable that the AbstractSQLStore implementation is closer # to the original design, but this conforms to working implementations. def contexts(self, triple=None): c=self._db.cursor() asserted_table="%s_asserted_statements"%self._internedId asserted_type_table="%s_type_statements"%self._internedId literal_table = "%s_literal_statements"%self._internedId parameters = [] if triple is not None: subject,predicate,obj=triple if predicate == RDF.type: #select from asserted rdf:type partition clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,Any,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] elif isinstance(predicate,REGEXTerm) and predicate.compiledExpr.match(RDF.type) or not predicate: #Select from literal partition if (obj is Literal or None) and asserted non rdf:type partition (if obj is URIRef or None) clauseString,params = self.buildClause('typeTable',subject,RDF.type,obj,Any,True) parameters.extend(params) selects = [ ( asserted_type_table, 'typeTable', clauseString, ASSERTED_TYPE_PARTITION ), ] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION 
)) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) elif predicate: #select from asserted non rdf:type partition (optionally) and literal partition (optionally) selects = [] if not self.STRONGLY_TYPED_TERMS or isinstance(obj,Literal) or not obj or (self.STRONGLY_TYPED_TERMS and isinstance(obj,REGEXTerm)): clauseString,params = self.buildClause('literal',subject,predicate,obj) parameters.extend(params) selects.append(( literal_table, 'literal', clauseString, ASSERTED_LITERAL_PARTITION )) if not isinstance(obj,Literal) and not (isinstance(obj,REGEXTerm) and self.STRONGLY_TYPED_TERMS) or not obj: clauseString,params = self.buildClause('asserted',subject,predicate,obj) parameters.extend(params) selects.append(( asserted_table, 'asserted', clauseString, ASSERTED_NON_TYPE_PARTITION )) q=unionSELECT(selects,distinct=True,selectType=CONTEXT_SELECT) else: selects = [ ( asserted_type_table, 'typeTable', '', ASSERTED_TYPE_PARTITION ), ( asserted_table, 'asserted', '', ASSERTED_NON_TYPE_PARTITION ), ( literal_table, 'literal', '', ASSERTED_LITERAL_PARTITION ), ] q=unionSELECT(selects,distinct=True,selectType=CONTEXT_SELECT) self.executeSQL(c,self._normalizeSQLCmd(q),parameters) rt=c.fetchall() for contextId in [x[0] for x in rt]: yield Graph(self, URIRef(contextId)) c.close() # overridden for quote-character reasons def executeSQL(self,cursor,qStr,params=None,paramList=False): """ This takes the query string and parameters and (depending on the SQL implementation) either fill in the parameter in-place or pass it on to the Python DB impl (if it supports this). 
# new method abstracting much cut/paste code from AbstractSQLStore.
def buildGenericClause(self, generic, value, tableName):
    """
    Build a single SQL WHERE-clause fragment for one column ('generic',
    e.g. "subject") matched against 'value'.

    Returns a (clauseString, parameters) pair, or None when value is
    None (meaning: no constraint on this column).  The '%s' left inside
    clauseString is a positional placeholder filled in later by
    executeSQL().
    """
    # Column reference, qualified with the table alias when one is given.
    column = tableName and '%s.%s' % (tableName, generic) or generic
    if isinstance(value,REGEXTerm):
        # Regex match; the %s placeholder receives the pattern.
        return " REGEXP (%%s, %s)" % column, [value]
    elif isinstance(value,list):
        # A list is a disjunction: any of the listed terms may match.
        clauseStrings=[]
        paramStrings = []
        for s in value:
            if isinstance(s,REGEXTerm):
                # BUGFIX: the original appended a stray extra " %s"
                # placeholder here with no matching parameter, which
                # shifts every later positional substitution in
                # executeSQL; this now mirrors the scalar REGEXTerm
                # branch above.
                clauseStrings.append(" REGEXP (%%s, %s)" % column)
                paramStrings.append(self.normalizeTerm(s))
            elif isinstance(s,(QuotedGraph,Graph)):
                # Graphs match on their identifier.
                clauseStrings.append("%s=" % column + "%s")
                paramStrings.append(self.normalizeTerm(s.identifier))
            else:
                clauseStrings.append("%s=" % column + "%s")
                paramStrings.append(self.normalizeTerm(s))
        return '('+ ' or '.join(clauseStrings) + ')', paramStrings
    elif isinstance(value, (QuotedGraph,Graph)):
        return "%s=" % column + "%s", [self.normalizeTerm(value.identifier)]
    elif value == 'NULL':
        return "%s is null" % column, []
    elif value is not None:
        return "%s=" % column + "%s", [value]
    else:
        # BUGFIX: the original ended with
        #   return value is not None and "%s=" % (...) + "%s", [value] or None
        # which, by operator precedence, ALWAYS returned a 2-tuple
        # (with False as the clause when value is None).  Callers expect
        # None when there is no constraint.
        return None

def buildSubjClause(self,subject,tableName):
    """Clause fragment constraining the subject column."""
    return self.buildGenericClause("subject", subject, tableName)

def buildPredClause(self,predicate,tableName):
    """Clause fragment constraining the predicate column."""
    return self.buildGenericClause("predicate", predicate, tableName)

def buildObjClause(self,obj,tableName):
    """Clause fragment constraining the object column."""
    return self.buildGenericClause("object", obj, tableName)
buildContextClause(self,context,tableName): context = context is not None and self.normalizeTerm(context.identifier) or context return self.buildGenericClause("context", context, tableName) def buildTypeMemberClause(self,subject,tableName): return self.buildGenericClause("member", subject, tableName) def buildTypeClassClause(self,obj,tableName): return self.buildGenericClause("klass", obj, tableName) CREATE_ASSERTED_STATEMENTS_TABLE = """ CREATE TABLE %s_asserted_statements ( subject text not NULL, predicate text not NULL, object text not NULL, context text not NULL, termComb smallint not NULL)""" CREATE_ASSERTED_TYPE_STATEMENTS_TABLE = """ CREATE TABLE %s_type_statements ( member text not NULL, klass text not NULL, context text not NULL, termComb smallint not NULL)""" CREATE_LITERAL_STATEMENTS_TABLE = """ CREATE TABLE %s_literal_statements ( subject text not NULL, predicate text not NULL, object text, context text not NULL, termComb smallint not NULL, objLanguage varchar(3), objDatatype text)""" CREATE_QUOTED_STATEMENTS_TABLE = """ CREATE TABLE %s_quoted_statements ( subject text not NULL, predicate text not NULL, object text, context text not NULL, termComb smallint not NULL, objLanguage varchar(3), objDatatype text)""" CREATE_NS_BINDS_TABLE = """ CREATE TABLE %s_namespace_binds ( prefix varchar(20) UNIQUE not NULL, uri text, PRIMARY KEY (prefix))""" CREATE_TABLE_STMTS = [CREATE_ASSERTED_STATEMENTS_TABLE, CREATE_ASSERTED_TYPE_STATEMENTS_TABLE, CREATE_QUOTED_STATEMENTS_TABLE, CREATE_NS_BINDS_TABLE, CREATE_LITERAL_STATEMENTS_TABLE] INDICES = [ ( "%s_asserted_statements", [ ("%s_A_termComb_index",('termComb',)), ("%s_A_s_index",('subject',)), ("%s_A_p_index",('predicate',)), ("%s_A_o_index",('object',)), ("%s_A_c_index",('context',)), ], ), ( "%s_type_statements", [ ("%s_T_termComb_index",('termComb',)), ("%s_member_index",('member',)), ("%s_klass_index",('klass',)), ("%s_c_index",('context',)), ], ), ( "%s_literal_statements", [ 
("%s_L_termComb_index",('termComb',)), ("%s_L_s_index",('subject',)), ("%s_L_p_index",('predicate',)), ("%s_L_c_index",('context',)), ], ), ( "%s_quoted_statements", [ ("%s_Q_termComb_index",('termComb',)), ("%s_Q_s_index",('subject',)), ("%s_Q_p_index",('predicate',)), ("%s_Q_o_index",('object',)), ("%s_Q_c_index",('context',)), ], ), ( "%s_namespace_binds", [ ("%s_uri_index",('uri',)), ], )] rdflib-2.4.2/rdflib/store/IOMemory.py0000644000175000017500000003405411153616032016377 0ustar nachonacho# Authors: Michel Pelletier, Daniel Krech, Stefan Niederhauser from __future__ import generators Any = None from rdflib.BNode import BNode from rdflib.store import Store class IOMemory(Store): """\ An integer-key-optimized-context-aware-in-memory store. Uses nested dictionaries to store triples and context. Each triple is stored in six such indices as follows cspo[c][s][p][o] = 1 and cpos[c][p][o][s] = 1 and cosp[c][o][s][p] = 1 as well as spo[s][p][o] = [c] and pos[p][o][s] = [c] and pos[o][s][p] = [c] Context information is used to track the 'source' of the triple data for merging, unmerging, remerging purposes. context aware store stores consume more memory size than non context stores. 
""" context_aware = True formula_aware = True def __init__(self, configuration=None, identifier=None): super(IOMemory, self).__init__() # indexed by [context][subject][predicate][object] = 1 self.cspo = self.createIndex() # indexed by [context][predicate][object][subject] = 1 self.cpos = self.createIndex() # indexed by [context][object][subject][predicate] = 1 self.cosp = self.createIndex() # indexed by [subject][predicate][object] = [context] self.spo = self.createIndex() # indexed by [predicate][object][subject] = [context] self.pos = self.createIndex() # indexed by [object][subject][predicate] = [context] self.osp = self.createIndex() # indexes integer keys to identifiers self.forward = self.createForward() # reverse index of forward self.reverse = self.createReverse() self.identifier = identifier or BNode() self.__namespace = self.createPrefixMap() self.__prefix = self.createPrefixMap() def bind(self, prefix, namespace): self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): return self.__namespace.get(prefix, None) def prefix(self, namespace): return self.__prefix.get(namespace, None) def namespaces(self): for prefix, namespace in self.__namespace.iteritems(): yield prefix, namespace def defaultContext(self): return self.default_context def addContext(self, context): """ Add context w/o adding statement. Dan you can remove this if you want """ if not self.reverse.has_key(context): ci=randid() while not self.forward.insert(ci, context): ci=randid() self.reverse[context] = ci def intToIdentifier(self, (si, pi, oi)): """ Resolve an integer triple into identifers. """ return (self.forward[si], self.forward[pi], self.forward[oi]) def identifierToInt(self, (s, p, o)): """ Resolve an identifier triple into integers. 
""" return (self.reverse[s], self.reverse[p], self.reverse[o]) def uniqueSubjects(self, context=None): if context is None: index = self.spo else: index = self.cspo[context] for si in index.keys(): yield self.forward[si] def uniquePredicates(self, context=None): if context is None: index = self.pos else: index = self.cpos[context] for pi in index.keys(): yield self.forward[pi] def uniqueObjects(self, context=None): if context is None: index = self.osp else: index = self.cosp[context] for oi in index.keys(): yield self.forward[oi] def createForward(self): return {} def createReverse(self): return {} def createIndex(self): return {} def createPrefixMap(self): return {} def add(self, triple, context, quoted=False): """\ Add a triple to the store. """ Store.add(self, triple, context, quoted) for triple, cg in self.triples(triple, context): #triple is already in the store. return subject, predicate, object = triple f = self.forward r = self.reverse # assign keys for new identifiers if not r.has_key(subject): si=randid() while f.has_key(si): si=randid() f[si] = subject r[subject] = si else: si = r[subject] if not r.has_key(predicate): pi=randid() while f.has_key(pi): pi=randid() f[pi] = predicate r[predicate] = pi else: pi = r[predicate] if not r.has_key(object): oi=randid() while f.has_key(oi): oi=randid() f[oi] = object r[object] = oi else: oi = r[object] if not r.has_key(context): ci=randid() while f.has_key(ci): ci=randid() f[ci] = context r[context] = ci else: ci = r[context] # add dictionary entries for cspo[c][s][p][o] = 1, # cpos[c][p][o][s] = 1, and cosp[c][o][s][p] = 1, creating the # nested {} where they do not yet exits. 
self._setNestedIndex(self.cspo, ci, si, pi, oi) self._setNestedIndex(self.cpos, ci, pi, oi, si) self._setNestedIndex(self.cosp, ci, oi, si, pi) if not quoted: self._setNestedIndex(self.spo, si, pi, oi, ci) self._setNestedIndex(self.pos, pi, oi, si, ci) self._setNestedIndex(self.osp, oi, si, pi, ci) def _setNestedIndex(self, index, *keys): for key in keys[:-1]: if not index.has_key(key): index[key] = self.createIndex() index = index[key] index[keys[-1]] = 1 def _removeNestedIndex(self, index, *keys): """ Remove context from the list of contexts in a nested index. Afterwards, recursively remove nested indexes when they became empty. """ parents = [] for key in keys[:-1]: parents.append(index) index = index[key] del index[keys[-1]] n = len(parents) for i in xrange(n): index = parents[n-1-i] key = keys[n-1-i] if len(index[key]) == 0: del index[key] def remove(self, triple, context=None): Store.remove(self, triple, context) if context is not None: if context == self: context = None f = self.forward r = self.reverse if context is None: for triple, cg in self.triples(triple): subject, predicate, object = triple si, pi, oi = self.identifierToInt((subject, predicate, object)) contexts = list(self.contexts(triple)) for context in contexts: ci = r[context] del self.cspo[ci][si][pi][oi] del self.cpos[ci][pi][oi][si] del self.cosp[ci][oi][si][pi] self._removeNestedIndex(self.spo, si, pi, oi, ci) self._removeNestedIndex(self.pos, pi, oi, si, ci) self._removeNestedIndex(self.osp, oi, si, pi, ci) # grr!! hafta ref-count these before you can collect them dumbass! 
#del f[si], f[pi], f[oi] #del r[subject], r[predicate], r[object] else: subject, predicate, object = triple ci = r.get(context, None) if ci: for triple, cg in self.triples(triple, context): si, pi, oi = self.identifierToInt(triple) del self.cspo[ci][si][pi][oi] del self.cpos[ci][pi][oi][si] del self.cosp[ci][oi][si][pi] try: self._removeNestedIndex(self.spo, si, pi, oi, ci) self._removeNestedIndex(self.pos, pi, oi, si, ci) self._removeNestedIndex(self.osp, oi, si, pi, ci) except KeyError: # the context may be a quoted one in which # there will not be a triple in spo, pos or # osp. So ignore any KeyErrors pass # TODO delete references to resources in self.forward/self.reverse # that are not in use anymore... if subject is None and predicate is None and object is None: # remove context try: ci = self.reverse[context] del self.cspo[ci], self.cpos[ci], self.cosp[ci] except KeyError: # TODO: no exception when removing non-existant context? pass def triples(self, triple, context=None): """A generator over all the triples matching """ if context is not None: if context == self: context = None subject, predicate, object = triple ci = si = pi = oi = Any if context is None: spo = self.spo pos = self.pos osp = self.osp else: try: ci = self.reverse[context] # TODO: Really ignore keyerror here spo = self.cspo[ci] pos = self.cpos[ci] osp = self.cosp[ci] except KeyError: return try: if subject is not Any: si = self.reverse[subject] # throws keyerror if subject doesn't exist ;( if predicate is not Any: pi = self.reverse[predicate] if object is not Any: oi = self.reverse[object] except KeyError, e: return #raise StopIteration if si != Any: # subject is given if spo.has_key(si): subjectDictionary = spo[si] if pi != Any: # subject+predicate is given if subjectDictionary.has_key(pi): if oi!= Any: # subject+predicate+object is given if subjectDictionary[pi].has_key(oi): ss, pp, oo = self.intToIdentifier((si, pi, oi)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # 
given object not found pass else: # subject+predicate is given, object unbound for o in subjectDictionary[pi].keys(): ss, pp, oo = self.intToIdentifier((si, pi, o)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # given predicate not found pass else: # subject given, predicate unbound for p in subjectDictionary.keys(): if oi != Any: # object is given if subjectDictionary[p].has_key(oi): ss, pp, oo = self.intToIdentifier((si, p, oi)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # given object not found pass else: # object unbound for o in subjectDictionary[p].keys(): ss, pp, oo = self.intToIdentifier((si, p, o)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # given subject not found pass elif pi != Any: # predicate is given, subject unbound if pos.has_key(pi): predicateDictionary = pos[pi] if oi != Any: # predicate+object is given, subject unbound if predicateDictionary.has_key(oi): for s in predicateDictionary[oi].keys(): ss, pp, oo = self.intToIdentifier((s, pi, oi)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # given object not found pass else: # predicate is given, object+subject unbound for o in predicateDictionary.keys(): for s in predicateDictionary[o].keys(): ss, pp, oo = self.intToIdentifier((s, pi, o)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) elif oi != Any: # object is given, subject+predicate unbound if osp.has_key(oi): objectDictionary = osp[oi] for s in objectDictionary.keys(): for p in objectDictionary[s].keys(): ss, pp, oo = self.intToIdentifier((s, p, oi)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) else: # subject+predicate+object unbound for s in spo.keys(): subjectDictionary = spo[s] for p in subjectDictionary.keys(): for o in subjectDictionary[p].keys(): ss, pp, oo = self.intToIdentifier((s, p, o)) yield (ss, pp, oo), (c for c in self.contexts((ss, pp, oo))) def __len__(self, context=None): if context is not None: if context 
def contexts(self, triple=None):
    """
    Generator over context identifiers: all of them, or -- when a
    triple is supplied -- only the contexts that triple occurs in.
    """
    if triple:
        skey, pkey, okey = self.identifierToInt(triple)
        context_keys = self.spo[skey][pkey][okey]
    else:
        context_keys = self.cspo.keys()
    for ckey in context_keys:
        yield self.forward[ckey]

import random

def randid(randint=random.randint, choice=random.choice, signs=(-1,1)):
    """Return a random non-zero integer with magnitude in [1, 2000000000]."""
    magnitude = randint(1, 2000000000)
    return choice(signs) * magnitude

del random
bdb.rollback() #print "After rollback", e, add_txn, self.__dbTxn[thread.get_ident()], thread.get_ident() retries -= 1 #print "Retries failed!", bdb.db_env.lock_stat()['nlocks'] raise TransactionExpired("Add failed after exception:" % str(e)) # except Exception, e: # print "Got exception: ", e # bdb.rollback() #t.abort() wrapped.__doc__ = f.__doc__ return wrapped class BerkeleyDB(Sleepycat): """ A transaction-capable BerkeleyDB implementation The major difference are: - a dbTxn attribute which is the transaction object used for all bsddb databases - All operations (put,delete,get) take the dbTxn instance - The actual directory used for the bsddb persistence is the name of the identifier as a subdirectory of the 'path' """ context_aware = True formula_aware = True transaction_aware = True def __init__(self, configuration=None, identifier=None): super(BerkeleyDB, self).__init__(configuration, identifier) # number of locks, lockers and objects self.__locks = 5000 # when closing is True, no new transactions are allowed self.__closing = False # Each thread is responsible for a single transaction (included nested # ones) indexed by the thread id self.__dbTxn = {} def destroy(self, configuration): """ Destroy the underlying bsddb persistence for this store """ if SUPPORT_MULTIPLE_STORE_ENVIRON: fullDir = join(configuration,self.identifier) else: fullDir = configuration if exists(configuration): #From bsddb docs: #A DB_ENV handle that has already been used to open an environment #should not be used to call the DB_ENV->remove function; a new DB_ENV handle should be created for that purpose. 
def _init_db_environment(self, homeDir, create=True):
    """
    Create (if needed) and open the Berkeley DB environment rooted at
    homeDir, configured for transactions, locking and deadlock
    detection.

    Returns the opened DBEnv, or NO_STORE when the directory does not
    exist and create is False.
    """
    #NOTE: The identifier is appended to the path as the location for the db
    #This provides proper isolation for stores which have the same path but different identifiers
    if SUPPORT_MULTIPLE_STORE_ENVIRON:
        fullDir = join(homeDir,self.identifier)
    else:
        fullDir = homeDir
    envsetflags = db.DB_CDB_ALLDB
    envflags = db.DB_INIT_MPOOL | db.DB_INIT_LOCK | db.DB_THREAD | db.DB_INIT_TXN | db.DB_RECOVER
    if not exists(fullDir):
        if create==True:
            makedirs(fullDir)
            # BUGFIX: the original called self.create(path), but no name
            # 'path' exists in this scope (guaranteed NameError on the
            # first-create code path).  Pass the store location we were
            # given instead.
            # NOTE(review): Store.create() takes the configuration
            # string; confirm homeDir (rather than fullDir) is right
            # when SUPPORT_MULTIPLE_STORE_ENVIRON is enabled.
            self.create(homeDir)
        else:
            return NO_STORE
    db_env = db.DBEnv()
    db_env.set_cachesize(0, 1024*1024*50) # TODO
    # enable deadlock-detection
    db_env.set_lk_detect(db.DB_LOCK_MAXLOCKS)
    # increase the number of locks, this is correlated to the size (num triples) that
    # can be added/removed with a single transaction
    db_env.set_lk_max_locks(self.__locks)
    db_env.set_lk_max_lockers(self.__locks)
    db_env.set_lk_max_objects(self.__locks)
    #db_env.set_lg_max(1024*1024)
    #db_env.set_flags(envsetflags, 1)
    db_env.open(fullDir, envflags | db.DB_CREATE,0)
    return db_env
def commit(self, commit_root=False):
    """
    Commit the current thread's innermost bsddb transaction (bsddb
    transaction handles cannot be reused after commit).

    Set commit_root to True to commit the thread's root transaction --
    and with it, implicitly, all of its nested child transactions.
    """
    # Only act when this thread actually has open transactions.
    if thread.get_ident() in self.__dbTxn and self.is_open():
        try:
            # when the root commits, all childs commit as well
            if commit_root == True:
                self.__dbTxn[thread.get_ident()][0].commit(0)
                # no more transactions, clean up
                del self.__dbTxn[thread.get_ident()]
            else:
                # Commit only the most recently started (innermost) txn.
                txn = self.__dbTxn[thread.get_ident()].pop()
                _logger.debug("committing")
                #before = self.db_env.lock_stat()['nlocks']
                txn.commit(0)
                #print "committing a transaction", self.__dbTxn[thread.get_ident()], txn, before, self.db_env.lock_stat()['nlocks']
                if len(self.__dbTxn[thread.get_ident()]) == 0:
                    # Last one out: drop the per-thread entry entirely.
                    del self.__dbTxn[thread.get_ident()]
        except IndexError, e:
            #The dbTxn for the current thread is removed to indicate that
            #there are no active transactions for the current thread.
            del self.__dbTxn[thread.get_ident()]
        except Exception, e:
            # print "Got exception in commit", e
            raise e
    else:
        _logger.warning("No transaction to commit")
""" if thread.get_ident() in self.__dbTxn and self.is_open(): _logger.debug("rollingback") try: if rollback_root == True: # same as commit, when root aborts, all childs abort self.__dbTxn[thread.get_ident()][0].abort() del self.__dbTxn[thread.get_ident()] else: txn = self.__dbTxn[thread.get_ident()].pop() #before = self.db_env.lock_stat()['nlocks'] # print "rolling back a transaction", self.__dbTxn[thread.get_ident()], txn, before, self.db_env.lock_stat()['nlocks'] txn.abort() if len(self.__dbTxn[thread.get_ident()]) == 0: del self.__dbTxn[thread.get_ident()] except IndexError, e: #The dbTxn for the current thread is removed to indicate that #there are no active transactions for the current thread. del self.__dbTxn[thread.get_ident()] except Exception, e: # print "Got exception in rollback", e raise e else: _logger.warning("No transaction to rollback") def close(self, commit_pending_transaction=True): """ Properly handles transactions explicitely (with parameter) or by default """ # when closing, no new transactions are allowed # problem is that a thread can already have passed the test and is # half-way through begin_txn when close is called... self.__closing = True if not self.is_open(): return # this should close all existing transactions, not only by this thread, # uses the number of active transactions to sync on. 
if self.__dbTxn: # this will block for a while, depending on how long it takes # before the active transactions are committed/aborted while self.db_env.txn_stat()['nactive'] > 0: active_threads = self.__dbTxn.keys() for t in active_threads: if not commit_pending_transaction: self.rollback(rollback_root=True) else: self.commit(commit_root=True) sleep(0.1) # there may still be open transactions super(BerkeleyDB, self).close() def add(self, (subject, predicate, object_), context, quoted=False): @transaction def _add(self, (subject, predicate, object_), context, quoted, txn=None): Sleepycat.add(self, (subject, predicate, object_), context, quoted, txn) try: _add(self, (subject, predicate, object_), context, quoted) except Exception, e: # print "Got exception in _add: ", e raise e def remove(self, (subject, predicate, object_), context): @transaction def _remove(self, (subject, predicate, object_), context, txn=None): Sleepycat.remove(self, (subject, predicate, object_), context, txn=txn) try: _remove(self, (subject, predicate, object_), context) except Exception, e: # print "Got exception in _remove: ", e raise e rdflib-2.4.2/rdflib/store/__init__.py0000644000175000017500000002264311153616032016437 0ustar nachonacho## Context-aware: An RDF store capable of storing statements within contexts is considered context-aware. ## Essentially, such a store is able to partition the RDF model it represents into individual, named, and addressable sub-graphs. ## Relevant Notation3 reference regarding formula's, quoted statements, and such: http://www.w3.org/DesignIssues/Notation3.html ## Formula-aware: An RDF store capable of distinguishing between statements that are asserted and statements ## that are quoted is considered formula-aware. ## Conjunctive Graph: This refers to the 'top-level' Graph. It is the aggregation of all the contexts ## within it and is also the appropriate, absolute boundary for closed world assumptions / models. 
## For the sake of persistence, Conjunctive Graphs must be distinguished by identifiers (that may not ## neccessarily be RDF identifiers or may be an RDF identifier normalized - SHA1/MD5 perhaps - for database ## naming purposes ). ## Conjunctive Query: Any query that doesn't limit the store to search within a named context only. Such a ## query expects a context-aware store to search the entire asserted universe (the conjunctive graph). ## A formula-aware store is expected not to include quoted statements when matching such a query. from rdflib import RDF from rdflib import exceptions #Constants representing the state of a Store (returned by the open method) VALID_STORE = 1 CORRUPTED_STORE = 0 NO_STORE = -1 UNKNOWN = None from rdflib.events import Dispatcher, Event class StoreCreatedEvent(Event): """ This event is fired when the Store is created, it has the folloing attribute: - 'configuration' string that is used to create the store """ class TripleAddedEvent(Event): """ This event is fired when a triple is added, it has the following attributes: - 'triple' added to the graph - 'context' of the triple if any - 'graph' that the triple was added to """ class TripleRemovedEvent(Event): """ This event is fired when a triple is removed, it has the following attributes: - 'triple' removed from the graph - 'context' of the triple if any - 'graph' that the triple was removed from """ class Store(object): #Properties context_aware = False formula_aware = False transaction_aware = False batch_unification = False def __init__(self, configuration=None, identifier=None): """ identifier: URIRef of the Store. Defaults to CWD configuration: string containing infomation open can use to connect to datastore. 
""" self.__node_pickler = None self.dispatcher = Dispatcher() if configuration: self.open(configuration) def __get_node_pickler(self): if self.__node_pickler is None: from rdflib.store.NodePickler import NodePickler from rdflib.URIRef import URIRef from rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.Graph import Graph, QuotedGraph, GraphValue from rdflib.Variable import Variable from rdflib.Statement import Statement self.__node_pickler = np = NodePickler() np.register(self, "S") np.register(URIRef, "U") np.register(BNode, "B") np.register(Literal, "L") np.register(Graph, "G") np.register(QuotedGraph, "Q") np.register(Variable, "V") np.register(Statement, "s") np.register(GraphValue, "v") return self.__node_pickler node_pickler = property(__get_node_pickler) #Database management methods def create(self, configuration): self.dispatcher.dispatch(StoreCreatedEvent(configuration=configuration)) def open(self, configuration, create=False): """ Opens the store specified by the configuration string. If create is True a store will be created if it does not already exist. If create is False and a store does not already exist an exception is raised. An exception is also raised if a store exists, but there is insufficient permissions to open the store. This should return one of VALID_STORE,CORRUPTED_STORE,or NO_STORE """ return UNKNOWN def close(self, commit_pending_transaction=False): """ This closes the database connection. The commit_pending_transaction parameter specifies whether to commit all pending transactions before closing (if the store is transactional). """ def destroy(self, configuration): """ This destroys the instance of the store identified by the configuration string. """ def gc(self): """ Allows the store to perform any needed garbage collection """ pass #RDF APIs def add(self, (subject, predicate, object), context, quoted=False): """ Adds the given statement to a specific context or to the model. 
def addN(self, quads):
    """
    Add each (subject, predicate, object, context) quad to its context.

    This default implementation simply delegates every quad to add();
    stores may override it with a more efficient bulk path.  A quad
    whose context is None is rejected.
    """
    for quad in quads:
        subj, pred, obj, ctx = quad
        assert ctx is not None, "Context associated with %s %s %s is None!"%(subj,pred,obj)
        self.add((subj, pred, obj), ctx)
Stores can implement this to optimize the response time from the default 'fallback' implementation, which will iterate over each term in the list and dispatch to tripless """ if isinstance(object_,list): assert not isinstance(subject,list), "object_ / subject are both lists" assert not isinstance(predicate,list), "object_ / predicate are both lists" if object_: for obj in object_: for (s1, p1, o1), cg in self.triples((subject,predicate,obj),context): yield (s1, p1, o1), cg else: for (s1, p1, o1), cg in self.triples((subject,predicate,None),context): yield (s1, p1, o1), cg elif isinstance(subject,list): assert not isinstance(predicate,list), "subject / predicate are both lists" if subject: for subj in subject: for (s1, p1, o1), cg in self.triples((subj,predicate,object_),context): yield (s1, p1, o1), cg else: for (s1, p1, o1), cg in self.triples((None,predicate,object_),context): yield (s1, p1, o1), cg elif isinstance(predicate,list): assert not isinstance(subject,list), "predicate / subject are both lists" if predicate: for pred in predicate: for (s1, p1, o1), cg in self.triples((subject,pred,object_),context): yield (s1, p1, o1), cg else: for (s1, p1, o1), cg in self.triples((subject,None,object_),context): yield (s1, p1, o1), cg def triples(self, (subject, predicate, object), context=None): """ A generator over all the triples matching the pattern. Pattern can include any objects for used for comparing against nodes in the store, for example, REGEXTerm, URIRef, Literal, BNode, Variable, Graph, QuotedGraph, Date? DateRange? A conjunctive query can be indicated by either providing a value of None for the context or the identifier associated with the Conjunctive Graph (if it's context aware). """ # variants of triples will be done if / when optimization is needed def __len__(self, context=None): """ Number of statements in the store. 
This should only account for non-quoted (asserted) statements if the context is not specified, otherwise it should return the number of statements in the formula or context given. """ def contexts(self, triple=None): """ Generator over all contexts in the graph. If triple is specified, a generator over all contexts the triple is in. """ # Optional Namespace methods def bind(self, prefix, namespace): """ """ def prefix(self, namespace): """ """ def namespace(self, prefix): """ """ def namespaces(self): """ """ # Optional Transactional methods def commit(self): """ """ def rollback(self): """ """ rdflib-2.4.2/rdflib/store/Concurrent.py0000644000175000017500000000522111153616032017013 0ustar nachonachofrom __future__ import generators from threading import Lock class ResponsibleGenerator(object): """A generator that will help clean up when it is done being used.""" __slots__ = ['cleanup', 'gen'] def __init__(self, gen, cleanup): self.cleanup = cleanup self.gen = gen def __del__(self): self.cleanup() def __iter__(self): return self def next(self): return self.gen.next() class Concurrent(object): def __init__(self, store): self.store = store # number of calls to visit still in progress self.__visit_count = 0 # lock for locking down the indices self.__lock = Lock() # lists for keeping track of added and removed triples while # we wait for the lock self.__pending_removes = [] self.__pending_adds = [] def add(self, (s, p, o)): if self.__visit_count==0: self.store.add((s, p, o)) else: self.__pending_adds.append((s, p, o)) def remove(self, (subject, predicate, object)): if self.__visit_count==0: self.store.remove((subject, predicate, object)) else: self.__pending_removes.append((subject, predicate, object)) def triples(self, (subject, predicate, object)): g = self.store.triples((subject, predicate, object)) pending_removes = self.__pending_removes self.__begin_read() for s, p, o in ResponsibleGenerator(g, self.__end_read): if not (s, p, o) in pending_removes: yield s, p, o 
def __len__(self):
    # Delegates to the wrapped store; adds/removes buffered while
    # readers are active are not reflected until __end_read() flushes
    # them.
    return self.store.__len__()

def __begin_read(self):
    # Bump the active-reader count under the lock.  While the count is
    # non-zero, add()/remove() calls are queued rather than applied.
    lock = self.__lock
    lock.acquire()
    self.__visit_count = self.__visit_count + 1
    lock.release()

def __end_read(self):
    # Drop the active-reader count under the lock.  When the last
    # reader finishes, replay the queued removes and then the queued
    # adds against the wrapped store.
    lock = self.__lock
    lock.acquire()
    self.__visit_count = self.__visit_count - 1
    if self.__visit_count==0:
        pending_removes = self.__pending_removes
        while pending_removes:
            (s, p, o) = pending_removes.pop()
            try:
                self.store.remove((s, p, o))
            except:
                # TODO: change to try finally?
                print s, p, o, "Not in store to remove"
        pending_adds = self.__pending_adds
        while pending_adds:
            (s, p, o) = pending_adds.pop()
            self.store.add((s, p, o))
    lock.release()
class NamespaceIndex:
    """Two BDB b-trees mapping prefix -> namespace and namespace -> prefix."""

    def __init__(self, db_env):
        self.__db_env = db_env
        self.__namespace = db.DB(db_env)
        self.__namespace.open('namespace.db', None, db.DB_BTREE, db.DB_CREATE | db.DB_AUTO_COMMIT)
        self.__prefix = db.DB(db_env)
        self.__prefix.open("prefix.db", None, db.DB_BTREE, db.DB_CREATE | db.DB_AUTO_COMMIT)

    def bind(self, prefix, namespace):
        # Re-binding a namespace removes its previous prefix entry so the
        # two maps stay inverse of each other.
        prefix = prefix.encode("utf-8")
        namespace = namespace.encode("utf-8")
        t = self.__db_env.txn_begin()
        try:
            bound_prefix = self.__prefix.get(namespace, txn=t)
            if bound_prefix:
                self.__namespace.delete(bound_prefix, txn=t)
            self.__prefix.put(namespace, prefix, txn=t)
            self.__namespace.put(prefix, namespace, txn=t)
            t.commit(0)
        except Exception, e:
            # NOTE(review): the exception is swallowed after abort — the
            # caller cannot tell the bind failed. Confirm this is intended.
            t.abort()

    def namespaces(self):
        # Collect first, then yield, so the cursor is closed before any
        # consumer code runs.
        cursor = self.__namespace.cursor()
        results = []
        current = cursor.first()
        while current:
            prefix, namespace = current
            results.append((prefix, namespace))
            current = cursor.next()
        cursor.close()
        for prefix, namespace in results:
            yield prefix, URIRef(namespace)

    def prefix(self, namespace):
        namespace = namespace.encode("utf-8")
        t = self.__db_env.txn_begin()
        try:
            r = self.__prefix.get(namespace, None)
            t.commit(0)
            return r
        except Exception, e:
            t.abort()
            raise e

    def namespace(self, prefix):
        prefix = prefix.encode("utf-8")
        t = self.__db_env.txn_begin()
        try:
            r = self.__namespace.get(prefix, None)
            t.commit(0)
            return r
        except Exception, e:
            t.abort()
            raise e

    def close(self):
        self.__namespace.close()
        self.__prefix.close()


class IDMap:
    """Bidirectional node<->integer-id map backed by a BDB hash + recno pair.

    Nodes are serialized with the store's node_pickler; the recno database
    assigns the integer ids.
    """

    def __init__(self, db_env, node_pickler):
        self.__db_env = db_env
        self.__dbp = db.DB(db_env)
        self.__dbp.open("IDMap_hash.db", None, db.DB_HASH, db.DB_CREATE | db.DB_AUTO_COMMIT)
        self.__dbs = db.DB(db_env)
        self.__dbs.open("IDMap_recno.db", None, db.DB_RECNO, db.DB_CREATE | db.DB_AUTO_COMMIT)
        # pickling and un-pickling the data
        self.__node_pickler = node_pickler
        self.__loads = self.__node_pickler.loads
        self.__dumps = self.__node_pickler.dumps

    def insert(self, key):
        # this inserts a new key if the key was not available
        t = self.__db_env.txn_begin()
        try:
            k = self.__dumps(key)
            val = self.__dbp.get(k, txn=t)
            # the key is not found, register a new value for it
            if val is None:
                val = "%s" % self.__dbs.append(k, t)
                #dbp.put("counter", counter, txn=t)
                self.__dbp.put(k, val, txn=t)
            t.commit(0)
            return val
        except Exception, e:
            # NOTE(review): on failure this aborts and implicitly returns
            # None, which callers treat as "unbound" — confirm.
            t.abort()
            # t2.commit(0)

    def get_id(self, key):
        # Look up the integer id (as a string) for a pickled node, or None.
        k = self.__dumps(key)
        t = self.__db_env.txn_begin()
        try:
            val = self.__dbp.get(k, txn=t)
            t.commit(0)
            if val == None:
                return None
            return val
        except Exception, e:
            t.abort()

    def get_var(self, num):
        # Reverse lookup: recno id -> un-pickled node.
        t = self.__db_env.txn_begin()
        try:
            val = self.__dbs.get(num, txn=t)
            t.commit(0)
            return self.__loads(val)
        except Exception, e:
            t.abort()

    def close(self):
        self.__dbp.close()
        self.__dbs.close()

    def all(self):
        # Dump every (recno-id, pickled-node) pair; used for debugging.
        l = []
        cursor = self.__dbs.cursor()
        current = cursor.first()
        while current:
            try:
                key, value = current
                l.append((key, value))
                current = cursor.next()
            except Exception, e:
                cursor.close()
        cursor.close()
        return l


def secondaryIndexKey(key, data):
    # returns the first part of a tuple of ints joined by : in a str.
    return (data.split("^")[0])


class QuadIndex:
    """Six permuted BDB b-tree indices over (s, p, o, c) id-quads.

    Keys are the four integer ids joined by '^' in the index's own order;
    values are empty strings — the key itself carries all the data.
    """

    def __init__(self, db_env, idmapper):
        self.__db_env = db_env
        self.__map = idmapper
        self.__splitter = '^'
        self.__index_list = ['spoc', 'pocs', 'ocsp', 'ospc', 'cspo', 'cpso']
        self.__indices = self.__init_indices()
        self.__use_index = self.__init_use_index()
        self.__re_order = self.__init_re_order()
        self.__open = True

    def __init_indices(self):
        # One b-tree per permutation, named index_<perm>.db.
        indices = {}
        for index in self.__index_list:
            indices[index] = db.DB(self.__db_env)
            indices[index].open("index_%s.db" % index, None, db.DB_BTREE, db.DB_CREATE | db.DB_AUTO_COMMIT)
        return indices

    def __init_re_order(self):
        # create functions that changes the variable order back
        # to s,p,o,c
        re_order = {}
        re_order['spoc'] = lambda (s,p,o,c): (s,p,o,c)
        re_order['pocs'] = lambda (p,o,c,s): (s,p,o,c)
        re_order['ocsp'] = lambda (o,c,s,p): (s,p,o,c)
        re_order['ospc'] = lambda (o,s,p,c): (s,p,o,c)
        re_order['cspo'] = lambda (c,s,p,o): (s,p,o,c)
        re_order['cpso'] = lambda (c,p,s,o): (s,p,o,c)
        return re_order

    def __init_use_index(self):
        # a hashmap deciding which index to use depending on bound variables
        # there are 16 combinations and 6 indices
        use_index = {}
        # spoc
        use_index[(False, False, False, False)] = 'spoc'
        use_index[(True, False, False, False)] = 'spoc'
        use_index[(True, True, False, False)] = 'spoc'
        use_index[(True, True, True, False)] = 'spoc'
        use_index[(True, True, True, True)] = 'spoc'
        # pocs
        use_index[(False, True, False, False)] = 'pocs'
        use_index[(False, True, True, False)] = 'pocs'
        use_index[(False, True, True, True)] = 'pocs'
        # ocsp
        use_index[(False, False, True, False)] = 'ocsp'
        use_index[(False, False, True, True)] = 'ocsp'
        use_index[(True, False, True, True)] = 'ocsp'
        # cspo
        use_index[(False, False, False, True)] = 'cspo'
        use_index[(True, False, False, True)] = 'cspo'
        use_index[(True, True, False, True)] = 'cspo'
        # cpso
        use_index[(False, True, False, True)] = 'cpso'
        # ospc
        use_index[(True, False, True, False)] = 'ospc'
        return use_index

    def insert(self, (s,p,o,c)):
        # check if the
        # key is available,
        # make sure there is a mapping for all the values
        s_id = self.__map.insert(s)
        p_id = self.__map.insert(p)
        o_id = self.__map.insert(o)
        c_id = self.__map.insert(c)
        index_map = self.__init_index_map((s_id, p_id, o_id, c_id))
        t = self.__db_env.txn_begin()
        try:
            # Write the quad's key into every permutation index atomically.
            for index in self.__indices:
                self.__indices[index].put(index_map[index], '', txn=t)
            t.commit(0)
        except Exception, e:
            # NOTE(review): failure is silently swallowed after abort.
            t.abort()

    def delete(self, (s,p,o,c), txn=None):
        (s_id, p_id, o_id, c_id) = self.__map_id((s,p,o,c))
        # setup the indices
        index_map = self.__init_index_map((s_id, p_id, o_id, c_id))
        # since an index is in used within a transaction to traverse
        # the keys to delete, the delete deadlocks when acting on that index
        # close the cursor in __all_prefix before yielding?
        if txn == None:
            t = self.__db_env.txn_begin()
        else:
            # Nest under the caller's transaction when one is supplied.
            t = self.__db_env.txn_begin(txn)
        try:
            for index in self.__indices:
                self.__indices[index].delete(index_map[index], txn=t, flags=0)
            t.commit(0)
        except Exception, e:
            t.abort()
            # t2.commit(0)

    # returns a mapping from index configuration to a
    # string in the format v1^v2^v3^v4, which is used
    # as a key in the index
    def __init_index_map(self, (s_id,p_id,o_id,c_id)):
        indices = {}
        indices['spoc'] = self.__splitter.join([str(k) for k in (s_id, p_id, o_id, c_id)])
        indices['pocs'] = self.__splitter.join([str(k) for k in (p_id, o_id, c_id, s_id)])
        indices['ocsp'] = self.__splitter.join([str(k) for k in (o_id, c_id, s_id, p_id)])
        indices['ospc'] = self.__splitter.join([str(k) for k in (o_id, s_id, p_id, c_id)])
        indices['cspo'] = self.__splitter.join([str(k) for k in (c_id, s_id, p_id, o_id)])
        indices['cpso'] = self.__splitter.join([str(k) for k in (c_id, p_id, s_id, o_id)])
        return indices

    # a 0 (or '0') in a BDB range query is first in the range
    # returns the list of ints representing the bound
    # variables in the index
    def __map_id(self, (s,p,o,c)):
        def map_id(val):
            # Unbound terms (no id registered) map to 0.
            m = self.__map.get_id(val)
            if m == None:
                return 0
            return int(m)
        return [map_id(v) for v in (s,p,o,c)]

    def __map_var(self, (s_id, p_id, o_id, c_id)):
        # Inverse of __map_id: ids back to nodes; unknown ids become ''.
        def map_var(val):
            v = self.__map.get_var(int(val))
            if v == None:
                return ''
            return v
        return tuple([map_var(v) for v in (s_id, p_id, o_id, c_id)])

    def triples(self, (s,p,o,c), twopass=False):
        # TODO: implement a twopass version where all IDs are collected before
        # being mapped to their real values. Does this improve performance?
        #
        # iterates over the triples depending on the values of s,p,o,c
        indices = {}
        (s_id, p_id, o_id, c_id) = self.__map_id((s,p,o,c))
        # setup the indices
        indices = self.__init_index_map((s_id, p_id, o_id, c_id))
        # get the bool map for the current configuration
        (s_bool, p_bool, o_bool, c_bool) = [v != 0 for v in (s_id, p_id, o_id, c_id)]
        current_index = self.__use_index[(s_bool, p_bool, o_bool, c_bool)]
        prefix = indices[current_index]
        # strip of all ^0
        # no bound variables
        if not (True in (s_bool, p_bool, o_bool, c_bool)):
            prefix = ''
        # bound variables found, strip of trailing ^0 for the prefix
        elif self.__splitter + '0' in prefix:
            # NOTE(review): this truncates at the FIRST '^0' occurrence,
            # which relies on the chosen index putting all bound ids before
            # any unbound (0) ids — confirm against __init_use_index.
            prefix = prefix[0:prefix.find(self.__splitter + '0')]
        # otherwise use the given prefix
        re_order_f = self.__re_order[current_index]
        # convert the key back into the corresponding values
        for k,v in self.__all_prefix(prefix, current_index):
            (s,p,o,c) = self.__map_var(re_order_f(k.split(self.__splitter)))
            # print (k,v, prefix, indices[current_index], s, p, o, c)
            yield ((s,p,o), c)
        return

    def contexts(self, triple=None):
        # NOTE(review): the triple argument is ignored — every context in
        # the cspo index is yielded regardless. Confirm this is intended.
        for k,v in self.__all_prefix('', index='cspo'):
            (c,s,p,o) = self.__map_var(k.split(self.__splitter))
            yield c

    def remove(self, (s,p,o,c)):
        # Delete every quad matching the (possibly wildcard) pattern.
        [self.delete((s_t,p_t,o_t,c_t)) for ((s_t,p_t,o_t),c_t) in self.triples((s,p,o,c))]

    def __len__(self, context=None):
        # O(n): materializes all matches just to count them.
        return len([x for x in self.triples((None, None, None, context))])

    def __all_prefix(self, prefix, index='spoc'):
        # Range-scan the given index, yielding every (key, data) pair whose
        # key starts with prefix. A fresh cursor is opened per step so no
        # cursor is held open across a yield (see deadlock note in delete).
        next = True
        next_key = prefix
        while next:
            c = self.__indices[index].cursor()
            try:
                current = c.set_range(next_key)
                next = c.next()
                if next:
                    next_key, data = next
            except db.DBNotFoundError, e:
                # NOTE(review): if the very first set_range raises,
                # 'current' is referenced below while unbound — confirm.
                next = None
            # what happens when the cursor is closed and re-opened between
            # each access, does this mean that the lookup will be done again
            # or is the location preserved somehow?
            # in the first case it is better to collect a list of results and
            # then yield over this list
            c.close()
            if current:
                key, data = current
                if key and key.startswith(prefix):
                    yield key, data
            if next_key and not next_key.startswith(prefix):
                next = None

    def close(self):
        self.__open = False
        for index in self.__indices:
            self.__indices[index].close()


class BDBOptimized(Store):
    """
    An alternative BDB store implementing the index-structure proposed in:
    http://sw.deri.org/2005/02/dexa/yars.pdf

    Index structures key -> int, int -> key for variable to id and
    id -> variable

    Triple indices: spoc, pocs, ocsp, cspo, cpso, ospc

    This store is both transaction and context-aware.
    """
    context_aware = True
    formula_aware = False
    # TODO: transaction support
    transaction_aware = True

    def __init__(self, configuration=None, identifier=None):
        self.__open = False
        self.__identifier = identifier
        self.configuration = configuration
        # lock-table sizing for the BDB environment; see _init_db_environment
        self.__locks = 5000
        self.__db_env = None
        self.__id_mapper = None
        self.__quad_index = None
        self.__namespace_index = None
        # Store.__init__ calls open if there is a configuration
        super(BDBOptimized, self).__init__(configuration)

    def __get_identifier(self):
        return self.__identifier
    identifier = property(__get_identifier)

    def _init_db_environment(self, homeDir, create=True):
        #NOTE: The identifier is appended to the path as the location for the db
        #This provides proper isolation for stores which have the same path but different identifiers
        if SUPPORT_MULTIPLE_STORE_ENVIRON:
            # NOTE(review): 'join' is not imported in this module — this
            # branch raises NameError if SUPPORT_MULTIPLE_STORE_ENVIRON is
            # ever enabled. Confirm.
            fullDir = join(homeDir,self.identifier)
        else:
            fullDir = homeDir
        envsetflags = db.DB_CDB_ALLDB
        envflags = db.DB_INIT_MPOOL | db.DB_INIT_LOCK | db.DB_THREAD | db.DB_INIT_TXN | db.DB_RECOVER
        if not exists(fullDir):
            if create==True:
                # NOTE(review): 'makedirs' is not imported (only mkdir) and
                # 'path' is undefined here — both lines look broken; confirm.
                makedirs(fullDir)
                self.create(path)
            else:
                return NO_STORE
        db_env = db.DBEnv()
        db_env.set_cachesize(0, 1024*1024*50) # TODO
        # enable
        # deadlock-detection
        db_env.set_lk_detect(db.DB_LOCK_MAXLOCKS)
        # increase the number of locks, this is correlated to the size (num triples) that
        # can be added/removed with a single transaction
        db_env.set_lk_max_locks(self.__locks)
        db_env.set_lk_max_lockers(self.__locks)
        db_env.set_lk_max_objects(self.__locks)
        #db_env.set_lg_max(1024*1024)
        #db_env.set_flags(envsetflags, 1)
        db_env.open(fullDir, envflags | db.DB_CREATE,0)
        return db_env

    def is_open(self):
        return self.__open

    def open(self, path, create=True):
        # Derive a URI identifier from the path when none was given, then
        # build the environment and the three index layers.
        homeDir = path
        if self.__identifier is None:
            self.__identifier = URIRef(pathname2url(abspath(homeDir)))
        self.__db_env = self._init_db_environment(homeDir, create)
        self.__open = True
        self.__id_mapper = IDMap(self.__db_env, self.node_pickler)
        self.__quad_index = QuadIndex(self.__db_env, self.__id_mapper)
        self.__namespace_index = NamespaceIndex(self.__db_env)

    def triples(self, (subject, predicate, object), context=None):
        # Delegate pattern matching to the quad index; yields ((s,p,o), c).
        for result in self.__quad_index.triples((subject, predicate, object, context)):
            yield result

    def contexts(self, triple=None):
        return self.__quad_index.contexts(triple=triple)

    def add(self, (subject, predicate, object), context, quoted=False, txn=None):
        """\
        Add a triple to the store of triples.
        """
        assert self.__open, "The Store must be open."
        Store.add(self, (subject, predicate, object), context, quoted)
        self.__quad_index.insert((subject, predicate, object, context))

    def remove(self, (subject, predicate, object), context, txn=None):
        """
        Remove the matching triples and/or context from the store.

        Variables can be unbound by using None.
        """
        assert self.__open, "The Store must be open."
        Store.remove(self, (subject, predicate, object), context)
        self.__quad_index.remove((subject, predicate, object, context))

    def bind(self, prefix, namespace):
        return self.__namespace_index.bind(prefix, namespace)

    def namespace(self, prefix):
        return self.__namespace_index.namespace(prefix)

    def prefix(self, namespace):
        return self.__namespace_index.prefix(namespace)

    def namespaces(self):
        for r in self.__namespace_index.namespaces():
            yield r

    def __len__(self, context=None):
        return self.__quad_index.__len__(context)

    def close(self, commit_pending_transaction=True):
        # Close the index layers before the environment that backs them.
        self.__open = False
        self.__id_mapper.close()
        self.__quad_index.close()
        self.__namespace_index.close()
        self.__db_env.close()
rdflib-2.4.2/rdflib/RDF.py0000644000175000017500000000144011153616035014152 0ustar  nachonacho
from rdflib.Namespace import Namespace

RDFNS = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#")

# Syntax names
RDF = RDFNS["RDF"]
Description = RDFNS["Description"]
ID = RDFNS["ID"]
about = RDFNS["about"]
parseType = RDFNS["parseType"]
resource = RDFNS["resource"]
li = RDFNS["li"]
nodeID = RDFNS["nodeID"]
datatype = RDFNS["datatype"]

# RDF Classes
Seq = RDFNS["Seq"]
Bag = RDFNS["Bag"]
Alt = RDFNS["Alt"]
Statement = RDFNS["Statement"]
Property = RDFNS["Property"]
XMLLiteral = RDFNS["XMLLiteral"]
List = RDFNS["List"]

# RDF Properties
subject = RDFNS["subject"]
predicate = RDFNS["predicate"]
object = RDFNS["object"]
type = RDFNS["type"]
value = RDFNS["value"]
first = RDFNS["first"]
rest = RDFNS["rest"]
# and _n where n is a non-negative integer

# RDF Resources
nil = RDFNS["nil"]
rdflib-2.4.2/rdflib/compat.py0000644000175000017500000000051411153616035015023 0ustar  nachonacho
import sys

if sys.version_info < (2, 4, 1, 'alpha', 1):
    def rsplit(value, char=None, count=-1):
        # rsplit is not available in Python < 2.4a1
        # NOTE(review): this shim diverges from str.rsplit in two cases:
        # char=None splits on single spaces rather than whitespace runs,
        # and count=0 yields an extra leading '' element — confirm callers
        # never hit those cases.
        if char is None:
            char = ' '
        parts = value.split(char)
        return [char.join(parts[:-count])] + parts[-count:]
else:
    from string import rsplit
rdflib-2.4.2/rdflib/exceptions.py0000644000175000017500000000374711153616035015724 0ustar  nachonacho

class Error(Exception):
    """Base class for rdflib exceptions."""
    def __init__(self, msg=None):
        Exception.__init__(self, msg)
        # keep the message accessible as an attribute as well
        self.msg = msg


class TypeCheckError(Error):
    """Parts of assertions are subject to type checks."""
    def __init__(self, node):
        Error.__init__(self, node)
        # record both the offending node and its type for the subclasses'
        # message formatting below
        self.type = type(node)
        self.node = node


class SubjectTypeError(TypeCheckError):
    """Subject of an assertion must be an instance of URIRef."""
    def __init__(self, node):
        TypeCheckError.__init__(self, node)
        self.msg = "Subject must be instance of URIRef or BNode: %s(%s)" \
                   % (self.node, self.type)


class PredicateTypeError(TypeCheckError):
    """Predicate of an assertion must be an instance of URIRef."""
    def __init__(self, node):
        TypeCheckError.__init__(self, node)
        self.msg = "Predicate must be a URIRef instance: %s(%s)" \
                   % (self.node, self.type)


class ObjectTypeError(TypeCheckError):
    """Object of an assertion must be an instance of URIRef, Literal,
    or BNode."""
    def __init__(self, node):
        TypeCheckError.__init__(self, node)
        self.msg = "Object must be instance of URIRef, Literal, or BNode: %s(%s)" % \
                   (self.node, self.type)


class ContextTypeError(TypeCheckError):
    """Context of an assertion must be an instance of URIRef."""
    def __init__(self, node):
        TypeCheckError.__init__(self, node)
        self.msg = "Context must be instance of URIRef or BNode: %s(%s)" \
                   % (self.node, self.type)


class ParserError(Error):
    """RDF Parser error."""
    def __init__(self, msg):
        self.msg = msg
    def __str__(self):
        return self.msg


class UniquenessError(Error) :
    """A uniqueness assumption was made in the context, and that is not true"""
    def __init__(self, values):
        Error.__init__(self, "Uniqueness assumption is not fulfilled. Multiple values are: %s" % values)
rdflib-2.4.2/rdflib/interfaces.py0000644000175000017500000001431611153616035015670 0ustar  nachonacho

# Fall back to no-op stand-ins when zope.interface is unavailable so this
# module stays importable without the dependency.
try:
    from zope.interface import Interface, classImplements, implements
except ImportError:
    class Interface(object):
        pass
    def classImplements(c, i):
        pass
    def implements(*args):
        pass

from rdflib import RDF


class IGraph(Interface):
    """\
    An rdflib.Graph indexes data expressed in the Resource Description
    Framework (RDF).

    Any kind of content, whether inside Zope or from some outside source,
    can be cataloged if it can describe itself using the RDF standard. Any
    kind of RDF vocabulary like RSS, OWL, DAML+OIL, Dublin Core, or any
    kind of XML schema or data can be expressed into the graph. Once data
    is graphed it can be queried using either the Python query interface,
    a TALES-based RDF query expression language, or the sparql rdf query
    language. Results of a query can be either a generator of result
    records or RDF in xml or NT format.

    In Semantic Web terms, a graph is a persistent triple store. RDF is
    broken down into subject, predicate, and object relations (called
    triples) and each relation is indexed. The triple store can then be
    queried for triples that match patterns.
    """

    def parse(rdf, format="xml"):
        """ Parse RDF-XML into the catalog. """

    def add((subject, predicate, object)):
        """ Add one triple to the catalog. """

    def remove((subject, predicate, object)):
        """ Remove one triple from the catalog. """

    def triples((subject, predicate, object), *args):
        """ Query the triple store. """

    def contexts(triple=None):
        """ Generator over all contexts in the graph. If triple is
        specified, a generator over all contexts the triple is in."""

    def value(subject, predicate=RDF.value, object=None, default=None, any=False):
        """ Get a value for a subject/predicate, predicate/object, or
        subject/object pair -- exactly one of subject, predicate,
        object must be None. Useful if one knows that there may only be
        one value.
        It is one of those situations that occur a lot, hence this
        'macro' like utility

        Parameters:
        -----------
        subject, predicate, object  -- exactly one must be None
        default -- value to be returned if no values found
        any -- if True:  return any value in the case there is more than one
               else: raise UniquenessError
        """

    def label(subject, default=''):
        """ Queries for the RDFS.label of the subject, returns default
        if no label exists."""

    def comment(subject, default=''):
        """ Queries for the RDFS.comment of the subject, returns default
        if no comment exists."""

    def items(list):
        """Generator over all items in the resource specified by list
        (an RDF collection)"""

    def __iter__():
        """ Iterates over all triples in the store."""

    def __contains__(triple):
        """ Support for 'triple in graph' syntax."""

    def __len__(context=None):
        """ Returns the number of triples in the graph. If context is
        specified then the number of triples in the context is returned
        instead."""

    def __eq__(other):
        """ Test if Graph is exactly equal to Graph other."""

    def __iadd__(other):
        """ Add all triples in Graph other to Graph."""

    def __isub__(other):
        """ Subtract all triples in Graph other from Graph."""

    def subjects(predicate=None, object=None):
        """ A generator of subjects with the given predicate and object."""

    def predicates(subject=None, object=None):
        """ A generator of predicates with the given subject and object."""

    def objects(subject=None, predicate=None):
        """ A generator of objects with the given subject and predicate."""

    def subject_predicates(object=None):
        """ A generator of (subject, predicate) tuples for the given object"""

    def subject_objects(predicate=None):
        """ A generator of (subject, object) tuples for the given predicate"""

    def predicate_objects(subject=None):
        """ A generator of (predicate, object) tuples for the given subject"""

    def get_context(identifier):
        """ Returns a Context graph for the given identifier, which
        must be a URIRef or BNode."""

    def remove_context(identifier):
        """ Removes the given context from the graph. """

    def transitive_objects(subject, property, remember=None):
        """ """

    def transitive_subjects(predicate, object, remember=None):
        """ """

    def load(location, publicID=None, format="xml"):
        """ for b/w compat. See parse."""

    def save(location, format="xml", base=None, encoding=None):
        """ for b/x compat. See serialize."""

    def context_id(uri):
        pass

    # NOTE(review): this is a second declaration of parse in the same
    # interface; in plain Python it shadows the earlier one — confirm.
    def parse(source, publicID=None, format="xml"):
        """ Parse source into Graph. If Graph is context-aware it'll get
        loaded into it's own context (sub graph). Format defaults to xml
        (AKA rdf/xml). The publicID argument is for specifying the logical
        URI for the case that it's different from the physical source URI.
        Returns the context into which the source was parsed."""

    def serialize(destination=None, format="xml", base=None, encoding=None):
        """ Serialize the Graph to destination. If destination is None
        serialize method returns the serialization as a string. Format
        defaults to xml (AKA rdf/xml)."""

    def seq(subject):
        """ Check if subject is an rdf:Seq. If yes, it returns a Seq
        class instance, None otherwise. """

    def absolutize(uri, defrag=1):
        """ Will turn uri into an absolute URI if it's not one already. """

    def bind(prefix, namespace, override=True):
        """Bind prefix to namespace. If override is True will bind
        namespace to given prefix if namespace was already bound to a
        different prefix."""

    def namespaces():
        """Generator over all the prefix, namespace tuples. """


class IIdentifier(Interface):

    def n3():
        """ Return N3 representation of identifier. """

    def startswith(string):
        """ dummy. """

    def __cmp__(other):
        """ dummy. """
rdflib-2.4.2/rdflib/term_utils.py0000644000175000017500000001035711153616035015735 0ustar  nachonacho
from rdflib import *
from rdflib.Graph import QuotedGraph, Graph, ConjunctiveGraph, BackwardCompatGraph

#Takes an instance of a Graph (Graph, QuotedGraph, ConjunctiveGraph, or BackwardCompatGraph)
#and returns the Graphs identifier and 'type' ('U' for Graphs, 'F' for QuotedGraphs ).
def normalizeGraph(graph):
    # See the module comment above: quoted graphs are always 'F', other
    # graphs are typed by their identifier's term letter.
    if isinstance(graph,QuotedGraph):
        return graph.identifier, 'F'
    else:
        return graph.identifier , term2Letter(graph.identifier)

# term-letter -> constructor for rebuilding terms from stored letters
TERM_INSTANCIATION_DICT ={
    'U':URIRef,
    'B':BNode,
    'V':Variable,
    'L':Literal
    }

# graph-type letter -> (graph class, identifier class)
GRAPH_TERM_DICT = {
    'F': (QuotedGraph, URIRef),
    'U': (Graph, URIRef),
    'B': (Graph, BNode)
}

def term2Letter(term):
    # Classify a term into the one-letter code used by TERM_COMBINATIONS.
    if isinstance(term,URIRef):
        return 'U'
    elif isinstance(term,BNode):
        return 'B'
    elif isinstance(term,Literal):
        return 'L'
    elif isinstance(term,QuotedGraph):
        return 'F'
    elif isinstance(term,Variable):
        return 'V'
    elif isinstance(term,Graph):
        return term2Letter(term.identifier)
    elif term is None:
        # None (wildcard) is treated as a Literal slot
        return 'L'
    else:
        raise Exception("The given term (%s) is not an instance of any of the known types (URIRef,BNode,Literal,QuotedGraph, or Variable).  It is a %s"%(term,type(term)))

def constructGraph(term):
    return GRAPH_TERM_DICT[term]

def triplePattern2termCombinations((s,p,o)):
    combinations=[]
    #combinations.update(TERM_COMBINATIONS)
    if isinstance(o,Literal):
        for key,val in TERM_COMBINATIONS.items():
            # NOTE(review): term letters are U/V/B/L/F — 'O' never occurs,
            # so this comparison never matches and the list stays empty.
            # Confirm whether 'L' was intended.
            if key[OBJECT] == 'O':
                combinations.append(val)
    return combinations

def type2TermCombination(member,klass,context):
    try:
        rt = TERM_COMBINATIONS['%sU%s%s'%(term2Letter(member),term2Letter(klass),normalizeGraph(context)[-1])]
        return rt
    except:
        # NOTE(review): 3 placeholders but 4 arguments — this raise itself
        # fails with TypeError when reached. Confirm.
        raise Exception("Unable to persist classification triple: %s %s %s"%(member,'rdf:type',klass,context))

def statement2TermCombination(subject,predicate,obj,context):
    return TERM_COMBINATIONS['%s%s%s%s'%(term2Letter(subject),term2Letter(predicate),term2Letter(obj),normalizeGraph(context)[-1])]

# positions within a quad key
SUBJECT    = 0
PREDICATE  = 1
OBJECT     = 2
CONTEXT    = 3

# Every 4-letter term-type combination mapped to a stable integer code.
# NOTE(review): 32 is skipped ('VUUB' : 31 jumps to 'VUUF' : 33); preserved
# as-is since these codes may already be persisted in stores.
TERM_COMBINATIONS = {
    'UUUU' : 0,  'UUUB' : 1,  'UUUF' : 2,
    'UUVU' : 3,  'UUVB' : 4,  'UUVF' : 5,
    'UUBU' : 6,  'UUBB' : 7,  'UUBF' : 8,
    'UULU' : 9,  'UULB' : 10, 'UULF' : 11,
    'UUFU' : 12, 'UUFB' : 13, 'UUFF' : 14,

    'UVUU' : 15, 'UVUB' : 16, 'UVUF' : 17,
    'UVVU' : 18, 'UVVB' : 19, 'UVVF' : 20,
    'UVBU' : 21, 'UVBB' : 22, 'UVBF' : 23,
    'UVLU' : 24, 'UVLB' : 25, 'UVLF' : 26,
    'UVFU' : 27, 'UVFB' : 28, 'UVFF' : 29,

    'VUUU' : 30, 'VUUB' : 31, 'VUUF' : 33,
    'VUVU' : 34, 'VUVB' : 35, 'VUVF' : 36,
    'VUBU' : 37, 'VUBB' : 38, 'VUBF' : 39,
    'VULU' : 40, 'VULB' : 41, 'VULF' : 42,
    'VUFU' : 43, 'VUFB' : 44, 'VUFF' : 45,

    'VVUU' : 46, 'VVUB' : 47, 'VVUF' : 48,
    'VVVU' : 49, 'VVVB' : 50, 'VVVF' : 51,
    'VVBU' : 52, 'VVBB' : 53, 'VVBF' : 54,
    'VVLU' : 55, 'VVLB' : 56, 'VVLF' : 57,
    'VVFU' : 58, 'VVFB' : 59, 'VVFF' : 60,

    'BUUU' : 61, 'BUUB' : 62, 'BUUF' : 63,
    'BUVU' : 64, 'BUVB' : 65, 'BUVF' : 66,
    'BUBU' : 67, 'BUBB' : 68, 'BUBF' : 69,
    'BULU' : 70, 'BULB' : 71, 'BULF' : 72,
    'BUFU' : 73, 'BUFB' : 74, 'BUFF' : 75,

    'BVUU' : 76, 'BVUB' : 77, 'BVUF' : 78,
    'BVVU' : 79, 'BVVB' : 80, 'BVVF' : 81,
    'BVBU' : 82, 'BVBB' : 83, 'BVBF' : 84,
    'BVLU' : 85, 'BVLB' : 86, 'BVLF' : 87,
    'BVFU' : 88, 'BVFB' : 89, 'BVFF' : 90,

    'FUUU' : 91,  'FUUB' : 92,  'FUUF' : 93,
    'FUVU' : 94,  'FUVB' : 95,  'FUVF' : 96,
    'FUBU' : 97,  'FUBB' : 98,  'FUBF' : 99,
    'FULU' : 100, 'FULB' : 101, 'FULF' : 102,
    'FUFU' : 103, 'FUFB' : 104, 'FUFF' : 105,

    'FVUU' : 106, 'FVUB' : 107, 'FVUF' : 108,
    'FVVU' : 109, 'FVVB' : 110, 'FVVF' : 111,
    'FVBU' : 112, 'FVBB' : 113, 'FVBF' : 114,
    'FVLU' : 115, 'FVLB' : 116, 'FVLF' : 117,
    'FVFU' : 118, 'FVFB' : 119, 'FVFF' : 120,
    }

REVERSE_TERM_COMBINATIONS = dict([(value,key) for key,value in TERM_COMBINATIONS.items()])
rdflib-2.4.2/rdflib/events.py0000644000175000017500000000501711153616035015047 0ustar  nachonacho

__doc__ = """
Dirt Simple Events

A Dispatcher (or a subclass of Dispatcher) stores event handlers that are
'fired' simple event objects when interesting things happen.

Create a dispatcher:

  >>> d = Dispatcher()

Now create a handler for the event and subscribe it to the dispatcher
to handle Event events.
A handler is a simple function or method that accepts the event as an
argument:

  >>> def handler1(event): print `event`

  >>> d.subscribe(Event, handler1)

Now dispatch a new event into the dispatcher, and see handler1 get
fired:

  >>> d.dispatch(Event(foo='bar', data='yours', used_by='the event handlers'))
"""

class Event(object):
    """
    An event is a container for attributes.  The source of an event
    creates this object, or a subclass, gives it any kind of data that
    the event handlers need to handle the event, and then calls
    notify(event).

    The target of an event registers a function to handle the event it
    is interested in with subscribe().  When a source calls
    notify(event), each subscriber to that event will be called in no
    particular order.
    """

    def __init__(self, **kw):
        # all keyword arguments simply become attributes of the event
        self.__dict__.update(kw)

    def __repr__(self):
        attrs = self.__dict__.keys()
        attrs.sort()
        # NOTE(review): the format string below has no placeholder — it
        # looks garbled (angle-bracketed text likely stripped from this
        # dump) and would raise TypeError as written. Confirm against the
        # released source before relying on it.
        return '' % ([a for a in attrs],)


class Dispatcher(object):
    """
    An object that can dispatch events to a privately managed group of
    subscribers.
    """

    # class-level default; subscribe() lazily replaces it per instance
    _dispatch_map = None

    def set_map(self, amap):
        self._dispatch_map = amap

    def get_map(self):
        return self._dispatch_map

    def subscribe(self, event_type, handler):
        """ Subscribe the given handler to an event_type.  Handlers are
        called in the order they are subscribed. """
        if self._dispatch_map is None:
            self.set_map({})
        lst = self._dispatch_map.get(event_type, None)
        if lst is None:
            lst = [handler]
        else:
            lst.append(handler)
        self._dispatch_map[event_type] = lst

    def dispatch(self, event):
        """ Dispatch the given event to the subscribed handlers for
        the event's type"""
        if self._dispatch_map is not None:
            lst = self._dispatch_map.get(type(event), None)
            if lst is None:
                raise ValueError("unknown event type: %s" % type(event))
            for l in lst:
                l(event)


def test():
    import doctest
    doctest.testmod()

if __name__ == '__main__':
    test()
rdflib-2.4.2/rdflib/constants.py0000644000175000017500000000235111153616035015555 0ustar  nachonacho
"""
Deprecated. Use rdflib.RDF and rdflib.RDFS instead.
"""

import warnings
warnings.warn("Use rdflib.RDF and rdflib.RDFS instead.", DeprecationWarning, stacklevel=2)

from rdflib import RDF as _RDF
from rdflib import RDFS as _RDFS

# Backwards-compatible aliases for the RDF/RDFS vocabularies.
RDFNS = _RDF.RDFNS

# Syntax names
RDF = _RDF.RDF
DESCRIPTION = _RDF.Description
ID = _RDF.ID
ABOUT = _RDF.about
PARSE_TYPE = _RDF.parseType
RESOURCE = _RDF.resource
LI = _RDF.li
NODE_ID = _RDF.nodeID
DATATYPE = _RDF.datatype

# RDF Classes
SEQ = _RDF.Seq
BAG = _RDF.Bag
ALT = _RDF.Alt
STATEMENT = _RDF.Statement
PROPERTY = _RDF.Property
XMLLiteral = _RDF.XMLLiteral
LIST = _RDF.List

# RDF Properties
SUBJECT = _RDF.subject
PREDICATE = _RDF.predicate
OBJECT = _RDF.object
TYPE = _RDF.type
VALUE = _RDF.value
FIRST = _RDF.first
REST = _RDF.rest
# and _n where n is a non-negative integer

# RDF Resources
NIL = _RDF.nil

# SCHEMA
RDFSNS = _RDFS.RDFSNS
RDFS_CLASS = _RDFS.Class
RDFS_RESOURCE = _RDFS.Resource
RDFS_SUBCLASSOF = _RDFS.subClassOf
RDFS_SUBPROPERTYOF = _RDFS.subPropertyOf
RDFS_ISDEFINEDBY = _RDFS.isDefinedBy
RDFS_LABEL = _RDFS.label
RDFS_COMMENT = _RDFS.comment
RDFS_RANGE = _RDFS.range
RDFS_DOMAIN = _RDFS.domain
RDFS_LITERAL = _RDFS.Literal
RDFS_CONTAINER = _RDFS.Container
RDFS_SEEALSO = _RDFS.seeAlso
rdflib-2.4.2/rdflib/Identifier.py0000644000175000017500000000047211153616035015625 0ustar  nachonacho
from rdflib.Node import Node

class Identifier(Node,unicode): # we allow Identifiers to be Nodes in our Graph
    """
    See http://www.w3.org/2002/07/rdf-identifer-terminology/
    regarding choice of terminology.
""" __slots__ = () def __new__(cls, value): return unicode.__new__(cls,value) rdflib-2.4.2/rdflib/Variable.py0000644000175000017500000000111311153616035015261 0ustar nachonachofrom rdflib.Identifier import Identifier try: from hashlib import md5 except ImportError: from md5 import md5 class Variable(Identifier): """ """ __slots__ = () def __new__(cls, value): if value[0]=='?': value=value[1:] return unicode.__new__(cls, value) def __repr__(self): return self.n3() def n3(self): return "?%s" % self def __reduce__(self): return (Variable, (unicode(self),)) def md5_term_hash(self): d = md5(str(self)) d.update("V") return d.hexdigest()rdflib-2.4.2/rdflib/sparql/0000755000175000017500000000000011204354476014475 5ustar nachonachordflib-2.4.2/rdflib/sparql/parser.py0000644000175000017500000002672211153616034016346 0ustar nachonacho#!/usr/bin/python """ SPARQL Lexer, Parser and Function-Mapper By Shawn Brown TO DO: swap current parser functions for Michelp's pyparsing setup add mapping for FILTER/constraints typed literals integer, double or boolean abbreviations language tags (e.g., @fr) nested OPTIONALs ??? blank node and RDF collection syntax ??? GRAPH statements ??? CURRENTLY SUPPORTED: Simple SELECT queries Predicate-object and object list shorthand (e.g., ?x foaf:name ?name ; foaf:mbox ?mbox ; vcard:TITLE ?title) Multi-line/triple-quoted literals BASE, PREFIX, SELECT, WHERE, UNION, OPTIONAL, multiple UNIONs and multiple OPTIONALs (but not nested OPTIONALs) USAGE: #from sparql_lpm import doSPARQL from rdflib.sparql.parser import doSPARQL ...load graph... ...define SPARQL query as string... 
result = doSPARQL(queryStr, sparqlGr) """ import base64 import re from rdflib.URIRef import URIRef from rdflib.sparql.graphPattern import GraphPattern def _escape(text): return base64.encodestring(text).replace("\n", "") def _unescape(text): return base64.decodestring(text) def _escapeLiterals(query): """ escape all literals with escape() """ fn = lambda m: "'" + _escape(m.group(2)) + "'" + m.group(3) pat = r"(\"\"\"|'''|[\"'])([^\1]*?[^\\]?)\1" # literal return re.sub(pat+"(\s*[.,;\}])", fn, query) def _resolveShorthand(query): """ resolve some of the syntactic shorthand (2.8 Other Syntactic Forms) """ def doList(pat, text): pat = re.compile(pat) while pat.search(text): text = re.sub(pat, r"\1\2\3 . \2\4", text) return text # 2.8.1 Predicate-Object Lists pat = r"(\{.*?)([^ ]+ )([^ ]+ [^ ]+)\s?; ([^ ]+ [^ ]+\s?[,;\.\}])" query = doList(pat, query) # 2.8.2 Object Lists pat = r"(\{.*?)([^ ]+ [^ ]+ )([^ ]+\s?), ([^ ]+\s?[,\.\}])" query = doList(pat, query) # TO DO: look at adding all that other crazy stuff!!! return query def _resolvePrefixes(query): """ resolve prefixed IRIs, remove PREFIX statements """ # parse PREFIX statements prefixes = re.findall("PREFIX ([\w\d]+:) <([^<>]+)>", query) # get list of prefix tuples prefixes.extend([ ("rdf:", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"), ("rdfs:", "http://www.w3.org/2000/01/rdf-schema#"), ("xsd:", "http://www.w3.org/2001/XMLSchema#"), ("fn:", "http://www.w3.org/2004/07/xpath-functions")]) matches = re.search("PREFIX : <([^<>]+)>", query) # parse colon-only PREFIX if matches != None: prefixes.append((":", matches.group(1))) query = re.sub("PREFIX [\w\d]*:[ ]?<[^<>]+>[ ]?", "", query) # remove PREFIX statements # escape IRIs (unescaped in ??) 
fn = lambda m: "<" + _escape(m.group(1)) + ">" query = re.sub("<([^<>]+)>", fn, query) # resolve prefixed IRIs for pair in prefixes: fn = lambda m: "<" + _escape(pair[1]+m.group(1)) + ">" # escaped too query = re.sub(pair[0]+"([^ .\}]+)", fn, query) return query def _resolveBase(query): """ resolve relative IRIs using BASE IRI, remove BASE statement """ pat = re.compile("BASE <([^<>]+)>\s?") base = pat.search(query) if base != None: fn = lambda m: "<" + base.group(1) + m.group(1) + ">" query = re.sub("<([^<>: ]+)>", fn, query) # resolve relative IRIs query = re.sub(pat, "", query) # remove BASE statement return query def _parseSelect(query): """ returns tuple of SELECTed variables or None """ var = "[?$][\\w\\d]+" # SELECT variable pattern select = re.search("SELECT(?: " + var + ")+", query) if select != None: select = re.findall(var, select.group(0)) select = tuple(select) return select class _StackManager: """ manages token stack for _parser() """ def __tokenGen(self, tokens): for token in tokens: yield token def __init__(self, tokenList): self.stack = self.__tokenGen(tokenList) self.current = self.stack.next() def next(self): try: self.current = self.stack.next() if self.current == "": self.next() # if blank, move to next except StopIteration: self.current = None def token(self): return self.current # # The following classes, _listTypes dictionary and _makeList() function are # used to test for recognized keywords and to create "typed" lists for nested # statements when parsing the SPARQL query's WHERE statement # class Where(list): pass class Union(list): pass class Optional(list): pass _listTypes = { "OPTIONAL": lambda : Optional([]), "UNION": lambda : Union([]), "WHERE": lambda : Where([]) } def _makeList(keyword): """ return list of given type or None """ global _listTypes if keyword in _listTypes: return _listTypes[keyword]() return None def _parser(stack, listType="WHERE"): """ simple recursive descent SPARQL parser """ typedList = _makeList(listType) 
def _findStatements(stmntType, stmntList):
    """Recursively flatten *stmntList*, collecting the statement strings
    contained in (nested) lists of the given type.  Used by
    _getStatements()."""
    matchType = type(_makeList(stmntType))
    collected = []
    for entry in stmntList:
        if type(entry) is str:
            collected.append(entry)
        if type(entry) == matchType:
            # Same-typed nested list: recurse and splice its statements in.
            collected.extend(_findStatements(stmntType, entry))
    return collected

def _getStatements(stmntType, stmntList):
    """Return one flat statement list per top-level list of the given type
    found in *stmntList*."""
    matchType = type(_makeList(stmntType))
    return [_findStatements(stmntType, entry)
            for entry in stmntList if type(entry) == matchType]
def doSPARQL(query, sparqlGr):
    """Run a SPARQL *query* string against the graph *sparqlGr*.

    Parses the query into its select/where/optional arguments and
    delegates to the graph's query() method, returning its result object.
    """
    args = _buildQueryArgs(query)
    return sparqlGr.query(args["select"], args["where"], args["optional"])
} """, # object list notation """ PREFIX foaf: SELECT ?x WHERE { ?x foaf:nick "Alice" , "Alice_" . } """, # escaped literals """ PREFIX tag: PREFIX vcard: SELECT ?name WHERE { ?a tag:name ?name ; vcard:TITLE "escape test vcard:TITLE " ; "This is a ''' Test \"\"\"" ; ?d } """, # key word as variable """ PREFIX foaf: SELECT ?PREFIX ?WHERE WHERE { ?x foaf:name ?PREFIX ; foaf:mbox ?WHERE . } """, # key word as prefix """ PREFIX WHERE: SELECT ?name ?mbox WHERE { ?x WHERE:name ?name ; WHERE:mbox ?mbox . } """, # some test cases from grammar.py "SELECT ?title WHERE { ?title . }", """PREFIX foaf: SELECT ?name ?mbox WHERE { ?person foaf:name ?name . OPTIONAL { ?person foaf:mbox ?mbox} }""", """PREFIX foaf: SELECT ?name ?name2 WHERE { ?person foaf:name ?name . OPTIONAL { ?person foaf:knows ?p2 . ?p2 foaf:name ?name2 . } }""", """PREFIX foaf: #PREFIX rdf: SELECT ?name ?mbox WHERE { { ?person rdf:type foaf:Person } . OPTIONAL { ?person foaf:name ?name } . OPTIONAL {?person foaf:mbox ?mbox} . }""" ] print "Content-type: text/plain\n\n" for query in testCases: print "\n-----\n" print '>>> query = """' + query.replace("\n", "\n... 
") + '"""' print ">>> result = doSPARQL(query, sparqlGr)\n" result = _buildQueryArgs(query); print "select = ", result["select"], "\n" print "where = ", result["where"], "\n" print "optional = ", result["optional"], "\n" print "result = sparqlGr.query(select, where, optional)" rdflib-2.4.2/rdflib/sparql/bison/0000755000175000017500000000000011204354476015607 5ustar nachonachordflib-2.4.2/rdflib/sparql/bison/SolutionModifier.py0000644000175000017500000000230711153616034021450 0ustar nachonachoASCENDING_ORDER = 1 DESCENDING_ORDER = 2 UNSPECIFIED_ORDER = 3 ORDER_VALUE_MAPPING = { ASCENDING_ORDER : 'Ascending', DESCENDING_ORDER : 'Descending', UNSPECIFIED_ORDER : 'Default', } class SolutionModifier(object): def __init__(self,orderClause=None,limitClause=None,offsetClause=None): self.orderClause = orderClause self.limitClause = limitClause self.offsetClause = offsetClause def __repr__(self): if not(self.orderClause or self.limitClause or self.offsetClause): return "" return ""%( self.orderClause and ' ORDER BY %s'%self.orderClause or '', self.limitClause and ' LIMIT %s'%self.limitClause or '', self.offsetClause and ' OFFSET %s'%self.offsetClause or '') class ParsedOrderConditionExpression(object): """ A list of OrderConditions OrderCondition ::= (('ASC'|'DESC')BrackettedExpression )|(FunctionCall|Var|BrackettedExpression) """ def __init__(self,expression,order): self.expression = expression self.order = order def __repr__(self): return "%s(%s)"%(ORDER_VALUE_MAPPING[self.order],self.expression.reduce()) rdflib-2.4.2/rdflib/sparql/bison/Util.py0000644000175000017500000000225011153616034017067 0ustar nachonachoclass ListRedirect(object): """ A utility class for lists of items joined by an operator. ListRedirects with length 1 are a special case and are considered equivalent to the item instead of a list containing it. 
def ListPrepend(item, list):
    """Return a new list: *item* first, then every element of *list*.

    Neither argument is modified.
    """
    combined = [item] + list
    return combined
""" def __init__(self,triples,graphPatterns): self.triples=triples self._graphPatterns = graphPatterns _g=[] if triples: _g=[GraphPattern(triples=triples)] if graphPatterns: _g.extend(graphPatterns) self.graphPatterns = _g def __iter__(self): for g in self.graphPatterns: if isinstance(g,GraphPattern): if not g.triples and g.nonTripleGraphPattern is None: continue else: yield g else: yield GraphPattern(triples=self.triples) def __len__(self): return len([g for g in self.graphPatterns if isinstance(g,GraphPattern) and (g.triples or g.nonTripleGraphPattern) is not None]) def __getitem__(self, k): return list(self.graphPatterns)[k] def __repr__(self): return "{ %s }"%repr(list(self)) class BlockOfTriples(object): """ A Basic Graph Pattern is a set of Triple Patterns. """ def __init__(self,statementList): self.statementList = statementList def __getattr__(self, attr): if hasattr(self.statementList, attr): return getattr(self.statementList, attr) raise AttributeError, '%s has no such attribute %s' % (repr(self), attr) def __repr__(self): return ""%repr(self.statementList) class GraphPattern(object): """ Complex graph patterns can be made by combining simpler graph patterns. The ways of creating graph patterns are: * Basic Graph Patterns, where a set of triple patterns must match * Group Graph Pattern, where a set of graph patterns must all match using the same variable substitution * Value constraints, which restrict RDF terms in a solution * Optional Graph patterns, where additional patterns may extend the solution * Alternative Graph Pattern, where two or more possible patterns are tried * Patterns on Named Graphs, where patterns are matched against named graphs ( GraphPatternNotTriples | Filter ) '.'? TriplesBlock? 
""" def __init__(self,nonTripleGraphPattern=None,filter=None,triples=None): #print "GraphPattern(..)",triples,filter,nonTripleGraphPattern triples = triples and triples or [] self.filter=filter self.triples = triples self.nonTripleGraphPattern = nonTripleGraphPattern def __repr__(self): if not self.triples and self.nonTripleGraphPattern is None: return "" elif self.triples and not self.nonTripleGraphPattern and not self.filter: return repr(self.triples) return "( %s '.'? %s )"%( self.filter is not None and self.filter or self.nonTripleGraphPattern, self.triples is not None and self.triples or '') # return ""%( # self.triples is not None and self.triples or '', # self.nonTripleGraphPattern is not None and ' %s'%self.nonTripleGraphPattern or '') class ParsedOptionalGraphPattern(ParsedGroupGraphPattern): """ An optional graph pattern is a combination of a pair of graph patterns. The second pattern modifies pattern solutions of the first pattern but does not fail matching of the overall optional graph pattern. """ def __init__(self,groupGraphPattern): self.triples=groupGraphPattern.triples self.graphPatterns = groupGraphPattern.graphPatterns # # super(ParsedOptionalGraphPattern,self).__init__(triples,graphPatterns) def __repr__(self): if self.graphPatterns is not None: return "OPTIONAL {%s %s}"%(self.triples,self.graphPatterns) else: return "OPTIONAL {%s}"%self.triples class ParsedAlternativeGraphPattern(object): """ A union graph pattern is a set of group graph patterns GPi. A union graph pattern matches a graph G with solution S if there is some GPi such that GPi matches G with solution S. 
""" def __init__(self,alternativePatterns): self.alternativePatterns = alternativePatterns def __repr__(self): return " UNION ".join(["{%s}"%g for g in self.alternativePatterns]) def __iter__(self): for g in self.alternativePatterns: yield g def __len__(self): return len(self.alternativePatterns) class ParsedGraphGraphPattern(ParsedGroupGraphPattern): """ Patterns on Named Graphs, where patterns are matched against named graphs """ def __init__(self,graphName,groupGraphPattern): self.name = graphName self.triples=groupGraphPattern.triples self.graphPatterns = groupGraphPattern.graphPatterns def __repr__(self): return "GRAPH %s { %s }"%(self.name,self.graphPatterns) rdflib-2.4.2/rdflib/sparql/bison/IRIRef.py0000644000175000017500000000073311153616034017236 0ustar nachonacho""" DatasetClause ::= 'FROM' ( IRIref | 'NAMED' IRIref ) See: http://www.w3.org/TR/rdf-sparql-query/#specifyingDataset 'A SPARQL query may specify the dataset to be used for matching. The FROM clauses give IRIs that the query processor can use to create the default graph and the FROM NAMED clause can be used to specify named graphs. 
' """ from rdflib import URIRef class IRIRef(URIRef): pass class RemoteGraph(URIRef): pass class NamedGraph(IRIRef): pass rdflib-2.4.2/rdflib/sparql/bison/FunctionLibrary.py0000644000175000017500000000362511153616034021273 0ustar nachonacho""" [28] FunctionCall ::= IRIref ArgList http://www.w3.org/TR/rdf-sparql-query/#evaluation """ from Util import ListRedirect STR = 0 LANG = 1 LANGMATCHES = 2 DATATYPE = 3 BOUND = 4 isIRI = 5 isURI = 6 isBLANK = 7 isLITERAL = 8 FUNCTION_NAMES = { STR : 'STR', LANG : 'LANG', LANGMATCHES : 'LANGMATCHES', DATATYPE : 'DATATYPE', BOUND : 'BOUND', isIRI : 'isIRI', isURI : 'isURI', isBLANK : 'isBLANK', isLITERAL : 'isLITERAL', } class FunctionCall(object): def __init__(self,name,arguments=None): self.name = name self.arguments = arguments is None and [] or arguments def __repr__(self): return "%s(%s)"%(self.name,','.join([isinstance(i,ListRedirect) and i.reduce() or i for i in self.arguments])) class ParsedArgumentList(ListRedirect): def __init__(self,arguments): self._list = arguments class ParsedREGEXInvocation(object): def __init__(self,arg1,arg2,arg3=None): self.arg1 = arg1 self.arg2 = arg2 self.arg3 = arg3 def __repr__(self): return "REGEX(%s,%s%s)"%( isinstance(self.arg1,ListRedirect) and self.arg1.reduce() or self.arg1, isinstance(self.arg2,ListRedirect) and self.arg2.reduce() or self.arg2, isinstance(self.arg3,ListRedirect) and self.arg3.reduce() or self.arg3,) class BuiltinFunctionCall(FunctionCall): def __init__(self,name,arg1,arg2=None): if arg2: arguments = [arg1,arg2] else: arguments = [arg1] super(BuiltinFunctionCall,self).__init__(name,arguments) def __repr__(self): #print self.name #print [type(i) for i in self.arguments] return "%s(%s)"%(FUNCTION_NAMES[self.name],','.join([isinstance(i,ListRedirect) and str(i.reduce()) or repr(i) for i in self.arguments]))rdflib-2.4.2/rdflib/sparql/bison/Query.py0000644000175000017500000000703611153616034017266 0ustar nachonachofrom rdflib.sparql.bison.GraphPattern import GraphPattern 
class SelectQuery(object):
    """
    SelectQuery ::= 'SELECT' 'DISTINCT'? ( Var+ | '*' )
                    DatasetClause* WhereClause SolutionModifier
    See: http://www.w3.org/TR/rdf-sparql-query/#rSelectQuery
    """
    def __init__(self, variables, dataSetList, whereClause, solutionModifier,
                 distinct=None):
        # Normalize the optional pieces: a missing/empty variable or
        # dataset list becomes []; any non-None marker means DISTINCT.
        if variables:
            self.variables = variables
        else:
            self.variables = []
        self.dataSets = dataSetList or []
        self.whereClause = whereClause
        self.solutionModifier = solutionModifier
        self.distinct = distinct is not None

    def __repr__(self):
        distinctPart = self.distinct and 'DISTINCT' or ''
        varPart = self.variables and self.variables or '*'
        modifierPart = self.solutionModifier and self.solutionModifier or ''
        return "SELECT %s %s %s %s %s" % (
            distinctPart, varPart, self.dataSets,
            self.whereClause.parsedGraphPattern, modifierPart)
class BinaryOperator(object):
    """Base class for SPARQL binary (infix) operators.

    Subclasses set NAME to the operator's surface syntax; repr() renders
    "(left NAME right)", reducing ListRedirect operands first.
    """
    NAME = ''
    def __init__(self, left, right):
        self.left = left
        self.right = right
    def __repr__(self):
        return "(%s %s %s)" % (
            isinstance(self.left, ListRedirect) and self.left.reduce() or self.left,
            self.NAME,
            isinstance(self.right, ListRedirect) and self.right.reduce() or self.right)

class EqualityOperator(BinaryOperator):
    NAME = '='

class NotEqualOperator(BinaryOperator):
    NAME = '!='

class LessThanOperator(BinaryOperator):
    NAME = '<'

class LessThanOrEqualOperator(BinaryOperator):
    # BUG FIX: was '>=', which rendered less-than-or-equal expressions with
    # the wrong operator symbol (duplicating GreaterThanOrEqualOperator).
    NAME = '<='

class GreaterThanOperator(BinaryOperator):
    NAME = '>'

class GreaterThanOrEqualOperator(BinaryOperator):
    NAME = '>='
class LogicalNegation(UnaryOperator):
    """Boolean '!' prefix operator."""
    NAME = '!'

class NumericPositive(UnaryOperator):
    """Arithmetic unary '+' prefix operator."""
    NAME = '+'

class NumericNegative(UnaryOperator):
    """Arithmetic unary '-' prefix operator."""
    NAME = '-'
""" Utility function for converting parsed Triple components into Unbound """ if isinstance(term,Variable): return term elif isinstance(term,BNode): return term elif isinstance(term,QName): #QNames and QName prefixes are the same in the grammar if not term.prefix: if queryProlog is None: return URIRef(term.localname) else: base = queryProlog.baseDeclaration and queryProlog.baseDeclaration or\ queryProlog.prefixBindings[u''] return URIRef(base + term.localname) elif term.prefix == '_': #Told BNode See: http://www.w3.org/2001/sw/DataAccess/issues#bnodeRef import warnings warnings.warn("The verbatim interpretation of explicit bnode identifiers is contrary to (current) DAWG stance",SyntaxWarning) return SessionBNode(term.localname) else: return URIRef(queryProlog.prefixBindings[term.prefix] + term.localname) elif isinstance(term,QNamePrefix): if queryProlog is None: return URIRef(term) else: if queryProlog.baseDeclaration is None: return URIRef(term) return URIRef(queryProlog.baseDeclaration + term) elif isinstance(term,ParsedString): return Literal(term) elif isinstance(term,ParsedDatatypedLiteral): dT = term.dataType if isinstance(dT,QName): dT = convertTerm(dT,queryProlog) return Literal(term.value,datatype=dT) else: return term def unRollCollection(collection,queryProlog): nestedComplexTerms = [] listStart = convertTerm(collection.identifier,queryProlog) if not collection._list: yield (listStart,RDF.rest,RDF.nil) elif len(collection._list) == 1: singleItem = collection._list[0] if isinstance(singleItem,RDFTerm): nestedComplexTerms.append(singleItem) yield (listStart,RDF.first,convertTerm(singleItem.identifier,queryProlog)) else: yield (listStart,RDF.first,convertTerm(singleItem,queryProlog)) yield (listStart,RDF.rest,RDF.nil) else: singleItem = collection._list[0] if isinstance(singleItem,Identifier): singleItem=singleItem else: singleItem=singleItem.identifier yield (listStart,RDF.first,convertTerm(singleItem,queryProlog)) prevLink = listStart for colObj in 
def unRollTripleItems(items, queryProlog):
    """
    Takes a list of Triples (nested lists or ParsedConstrainedTriples)
    and (recursively) returns a generator over all the contained triple
    patterns.
    """
    if isinstance(items, RDFTerm):
        for triple in unRollRDFTerm(items, queryProlog):
            yield triple
    elif isinstance(items, ParsedConstrainedTriples):
        assert isinstance(items.triples, list)
        for item in items.triples:
            if isinstance(item, RDFTerm):
                for triple in unRollRDFTerm(item, queryProlog):
                    yield triple
            else:
                # BUG FIX: the recursive branches iterated "for i in ..."
                # but yielded the *outer* item once per inner result,
                # discarding the unrolled triples themselves.
                for triple in unRollTripleItems(item, queryProlog):
                    yield triple
    else:
        for item in items:
            if isinstance(item, RDFTerm):
                for triple in unRollRDFTerm(item, queryProlog):
                    yield triple
            else:
                for triple in unRollTripleItems(item, queryProlog):
                    yield triple
expressions) into strings of their Python equivalent """ #print expr, type(expr), constraint combinationInvokation = combinationArg and '(%s)'%combinationArg or "" if isinstance(expr,ListRedirect): expr = expr.reduce() if isinstance(expr,UnaryOperator): return UnaryOperatorMapping[type(expr)]%( mapToOperator(expr.argument,prolog,combinationArg,constraint=constraint)) elif isinstance(expr,BinaryOperator): return BinaryOperatorMapping[type(expr)]%( mapToOperator(expr.left,prolog,combinationArg,constraint=constraint), mapToOperator(expr.right,prolog,combinationArg,constraint=constraint), combinationInvokation) elif isinstance(expr,(Variable,Unbound)): if constraint: return """sparqlOperators.EBV(rdflib.Variable("%s"))%s"""%(expr.n3(),combinationInvokation) else: return '"?%s"'%expr elif isinstance(expr,ParsedREGEXInvocation): return 'sparqlOperators.regex(%s,%s%s)%s'%( mapToOperator(expr.arg1,prolog,combinationArg,constraint=constraint), mapToOperator(expr.arg2,prolog,combinationArg,constraint=constraint), expr.arg3 and ',"'+expr.arg3 + '"' or '', combinationInvokation) elif isinstance(expr,BuiltinFunctionCall): normBuiltInName = FUNCTION_NAMES[expr.name].lower() normBuiltInName = CAMEL_CASE_BUILTINS.get(normBuiltInName,'sparqlOperators.'+normBuiltInName) return "%s(%s)%s"%(normBuiltInName,",".join( [mapToOperator(i,prolog,combinationArg,constraint=constraint) \ for i in expr.arguments]),combinationInvokation) elif isinstance(expr,ParsedDatatypedLiteral): lit = Literal(expr.value,datatype=convertTerm(expr.dataType,prolog)) if constraint: return """sparqlOperators.EBV(%r)%s"""%(lit,combinationInvokation) else: return repr(lit) elif isinstance(expr,Literal): return repr(expr) elif isinstance(expr,URIRef): import warnings warnings.warn("There is the possibility of __repr__ being deprecated in python3K",DeprecationWarning,stacklevel=3) return repr(expr) elif isinstance(expr,(QName,basestring)): return "'%s'"%convertTerm(expr,prolog) elif 
isinstance(expr,ParsedAdditiveExpressionList): return 'Literal(%s)'%(sparqlOperators.addOperator( [mapToOperator(item,prolog,combinationArg='i',constraint=constraint) \ for item in expr],combinationArg)) elif isinstance(expr,FunctionCall): if isinstance(expr.name,QName): fUri = convertTerm(expr.name,prolog) if fUri in XSDToPython: return "sparqlOperators.XSDCast(%s,'%s')%s"%( mapToOperator(expr.arguments[0],prolog,combinationArg='i',constraint=constraint), fUri, combinationInvokation) #@@FIXME The hook for extension functions goes here if fUri not in prolog.extensionFunctions: import warnings warnings.warn("Use of unregistered extension function: %s"%(fUri),UserWarning,1) else: raise NotImplemented("Extension Mechanism hook not yet completely hooked up..") #raise Exception("Whats do i do with %s (a %s)?"%(expr,type(expr).__name__)) else: if isinstance(expr,ListRedirect): expr = expr.reduce() if expr.pyBooleanOperator: return expr.pyBooleanOperator.join( [mapToOperator(i,prolog,constraint=constraint) for i in expr]) raise Exception("What do i do with %s (a %s)?"%(expr,type(expr).__name__)) def createSPARQLPConstraint(filter,prolog): """ Takes an instance of either ParsedExpressionFilter or ParsedFunctionFilter and converts it to a sparql-p operator by composing a python string of lambda functions and SPARQL operators This string is then evaluated to return the actual function for sparql-p """ reducedFilter = isinstance(filter.filter,ListRedirect) and filter.filter.reduce() or filter.filter if prolog.DEBUG: print reducedFilter,type(reducedFilter) if isinstance(reducedFilter,(ListRedirect, BinaryOperator, UnaryOperator, BuiltinFunctionCall, ParsedREGEXInvocation)): if isinstance(reducedFilter,UnaryOperator) and\ isinstance(reducedFilter.argument,Variable): const = True # elif isinstance(reducedFilter,ParsedRelationalExpressionList) and\ # False: # pass else: const = False else: const = True if isinstance(reducedFilter,ParsedConditionalAndExpressionList): 
combinationLambda = 'lambda(i): %s'%(' or '.join( ['%s'%mapToOperator(expr,prolog,combinationArg='i',constraint=const) \ for expr in reducedFilter])) if prolog.DEBUG: print "sparql-p operator(s): %s"%combinationLambda return eval(combinationLambda) elif isinstance(reducedFilter,ParsedRelationalExpressionList): combinationLambda = 'lambda(i): %s'%(' and '.join( ['%s'%mapToOperator(expr,prolog,combinationArg='i',constraint=const) \ for expr in reducedFilter])) if prolog.DEBUG: print "sparql-p operator(s): %s"%combinationLambda return eval(combinationLambda) elif isinstance(reducedFilter,BuiltinFunctionCall): rt=mapToOperator(reducedFilter,prolog,constraint=const) if prolog.DEBUG: print "sparql-p operator(s): %s"%rt return eval(rt) elif isinstance(reducedFilter,(ParsedAdditiveExpressionList,UnaryOperator,FunctionCall)): rt='lambda(i): %s'%( mapToOperator(reducedFilter,prolog,combinationArg='i',constraint=const)) if prolog.DEBUG: print "sparql-p operator(s): %s"%rt return eval(rt) elif isinstance(reducedFilter,Variable): rt = """sparqlOperators.EBV(rdflib.Variable("%s"))"""%reducedFilter.n3() if prolog.DEBUG: print "sparql-p operator(s): %s"%rt return eval(rt) # # reducedFilter = BuiltinFunctionCall(BOUND,reducedFilter) # rt=mapToOperator(reducedFilter,prolog) # if prolog.DEBUG: # print "sparql-p operator(s): %s"%rt # return eval(rt) else: if reducedFilter == u'true' or reducedFilter == u'false': def trueFn(arg): return True def falseFn(arg): return False return reducedFilter == u'true' and trueFn or falseFn rt=mapToOperator(reducedFilter, prolog, constraint=const) if prolog.DEBUG: print "sparql-p operator(s): %s"%rt return eval(rt) def isTriplePattern(nestedTriples): """ Determines (recursively) if the BasicGraphPattern contains any Triple Patterns returning a boolean flag indicating if it does or not """ if isinstance(nestedTriples,list): for i in nestedTriples: if isTriplePattern(i): return True return False elif isinstance(nestedTriples,ParsedConstrainedTriples): 
def isTriplePattern(nestedTriples):
    """
    Determines (recursively) if the BasicGraphPattern contains any
    Triple Patterns, returning a boolean flag indicating if it does or not.

    @param nestedTriples: a list (possibly nested), a
        ParsedConstrainedTriples instance, or any other triple-pattern node
    @return: True if any triple pattern is present, False otherwise
    """
    if isinstance(nestedTriples,list):
        # A list contains a pattern iff any of its members does.
        for i in nestedTriples:
            if isTriplePattern(i):
                return True
        return False
    elif isinstance(nestedTriples,ParsedConstrainedTriples):
        # BUGFIX: a second 'elif isinstance(..., ParsedConstrainedTriples)'
        # branch below this one was unreachable dead code and was removed.
        if nestedTriples.triples:
            return isTriplePattern(nestedTriples.triples)
        else:
            # an empty constrained-triples container holds no patterns
            return False
    else:
        # any other node counts as a triple pattern
        return True
class ParsedConditionalAndExpressionList(ListRedirect):
    """
    A list of ConditionalAndExpressions, joined by '||'.
    """
    pyBooleanOperator = ' or '

    def __init__(self, conditionalAndExprList):
        # Accept either a single expression or a list of them; always
        # store an internal list.
        if not isinstance(conditionalAndExprList, list):
            conditionalAndExprList = [conditionalAndExprList]
        self._list = conditionalAndExprList

    def __repr__(self):
        return "" % self._list
class ParsedDatatypedLiteral(object):
    """
    Placeholder for Datatyped literals.

    This is neccessary (instead of instanciating Literals directly) when
    datatype IRIRefs are QNames (in which case the prefix needs to be
    resolved at some point).
    """

    def __init__(self, value, dType):
        # raw lexical value and its (possibly QName) datatype
        self.value = value
        self.dataType = dType

    def __repr__(self):
        rendered = "'%s'^^%s" % (self.value, self.dataType)
        return rendered
class PrefixDeclaration(object):
    """
    PrefixDecl ::= 'PREFIX' QNAME_NS Q_IRI_REF

    See: http://www.w3.org/TR/rdf-sparql-query/#rPrefixDecl
    """
    def __init__(self,qName,iriRef):
        """
        @param qName: the namespace prefix, including its trailing ':'
            (e.g. u'foo:'); the colon is stripped before storing
        @param iriRef: the IRI the prefix is bound to
        """
        self.namespaceMapping = Namespace(iriRef)
        self.qName = qName[:-1]
        self.base = iriRef
    def __repr__(self):
        # BUGFIX: self.qName was already stripped of its trailing ':' in
        # __init__; slicing it again here ([:-1]) dropped the last
        # character of the prefix itself.
        return "%s -> %s"%(self.base,self.qName)
of the arcs. @param seed: RDFLib Resource @param Cluster: a L{sparqlGraph} instance, that has to be expanded with the new arcs """ try : # get all predicate and object pairs for the seed. # *If not yet in the new cluster, then go with a recursive round with those* for (p,o) in self.graph.predicate_objects(seed) : if not (seed,p,o) in Cluster.graph : Cluster.add((seed,p,o)) self._clusterForward(p,Cluster) self._clusterForward(o,Cluster) except : pass def clusterForward(self,seed,Cluster=None) : """ Cluster the triple store: from a seed, transitively get all properties and objects in direction of the arcs. @param seed: RDFLib Resource @param Cluster: another sparqlGraph instance; if None, a new one will be created. The subgraph will be added to this graph. @returns: The triple store containing the cluster @rtype: L{sparqlGraph} """ if Cluster == None : Cluster = SPARQLGraph() # This will raise an exception if not kosher... check_subject(seed) #print "Wrong type for clustering (probably a literal): %s" % seed self._clusterForward(seed,Cluster) return Cluster def _clusterBackward(self,seed,Cluster) : """Cluster the triple store: from a seed, transitively get all properties and objects in backward direction of the arcs. @param seed: RDFLib Resource @param Cluster: a L{sparqlGraph} instance, that has to be expanded with the new arcs """ try : for (s,p) in self.graph.subject_predicates(seed) : if not (s,p,seed) in Cluster.graph : Cluster.add((s,p,seed)) self._clusterBackward(s,Cluster) self._clusterBackward(p,Cluster) except : pass def clusterBackward(self,seed,Cluster=None) : """ Cluster the triple store: from a seed, transitively get all properties and objects 'backward', ie, following the link back in the graph. @param seed: RDFLib Resource @param Cluster: another sparqlGraph instance; if None, a new one will be created. The subgraph will be added to this graph. 
@returns: The triple store containing the cluster @rtype: L{sparqlGraph} """ if Cluster == None : Cluster = SPARQLGraph() # This will raise an exception if not kosher... check_object(seed) # print "Wrong type for clustering: %s" % seed self._clusterBackward(seed,Cluster) return Cluster def cluster(self,seed) : """ Cluster up and down, by summing up the forward and backward clustering @param seed: RDFLib Resource @returns: The triple store containing the cluster @rtype: L{sparqlGraph} """ raise "Am I getting here?" return self.clusterBackward(seed) + self.clusterForward(seed) rdflib-2.4.2/rdflib/sparql/Query.py0000644000175000017500000015077611153616034016166 0ustar nachonachoimport types, sets from pprint import pprint from rdflib import URIRef, BNode, Literal, Variable, RDF from rdflib.Graph import Graph, ConjunctiveGraph, ReadOnlyGraphAggregate from rdflib.Identifier import Identifier from rdflib.util import check_subject, list2set from rdflib.sparql import SPARQLError from rdflib.sparql.sparqlGraph import SPARQLGraph from rdflib.sparql.graphPattern import GraphPattern class SessionBNode(BNode): """ Special 'session' BNodes. I.e., BNodes at the query side which refer to BNodes in persistence """ pass def _checkOptionals(pattern,optionals) : """ The following remark in the SPARQL document is important: 'If a new variable is mentioned in an optional block (as mbox and hpage are mentioned in the previous example), that variable can be mentioned in that block and can not be mentioned in a subsequent block.' What this means is that the various optional blocks do not interefere at this level and there is no need for a check whether a binding in a subsequent block clashes with an earlier optional block. This method checks whether this requirement is fulfilled. Raises a SPARQLError exception if it is not (the rest of the algorithm relies on this, so checking it is a good idea...) 
def _variablesToArray(variables,name='') :
    """Turn an array of Variables or query strings into an array of query
    strings. If 'variables' is in fact a single string or Variable, it is
    wrapped into an array.

    @param variables: a string, a unicode, or a Variable, or an array of
    those (can be mixed, actually). As a special case, if the value is
    "*", it returns None (this corresponds to the wildcard in SPARQL)
    @param name: the string to be used in the error message
    """
    # single string: either the wildcard or a one-element result
    if isinstance(variables,basestring) :
        if variables == "*" :
            return None
        return [variables]
    # single Variable: wrap it
    if isinstance(variables,Variable) :
        return [variables]
    # a (possibly mixed) sequence of strings and Variables
    if type(variables) in (list, tuple) :
        retval = []
        for item in variables :
            if isinstance(item,(basestring,Variable)) :
                retval.append(item)
            else :
                raise SPARQLError("illegal type in '%s'; must be a string, unicode, or a Variable" % name)
        return retval
    # anything else is a caller error
    raise SPARQLError("'%s' argument must be a string, a Variable, or a list of those" % name)
@param pattern: graph pattern @type pattern: L{GraphPattern} """ bindings = {} for c in pattern.unbounds : bindings[c] = None return bindings def _ancestorTraversal(node,selected=False): if selected: yield node if node.parent: for i in _ancestorTraversal(node.parent,selected=True): yield i def _fetchBoundLeaves(node,previousBind=False,proxyTree=False): """ Takes a SPARQLNode and returns a generator over its bound leaves (including OPTIONAL proxies) """ isaProxyTree = proxyTree or node.priorLeftJoin if len(node.children) == 0 : if node.bound and not node.clash: #An OPTIONAL proxy is an expansion descendant which was #bound and valid (compatible) at a prior point and thus #serves as the cumulative context for all subsequent operations proxy=False for optChild in reduce(lambda x,y: x+y,[list(_fetchBoundLeaves(o,previousBind,isaProxyTree)) for o in node.optionalTrees],[]): proxy=True yield optChild if not proxy: yield node elif node.clash and previousBind and isaProxyTree: #prior evaluation of LeftJoin was successful but later became #excluded. Note, this should not provide any bindings yield node else : for c in node.children : for proxy in _fetchBoundLeaves(c,previousBind,isaProxyTree): yield proxy def isGroundQuad(quad): for term in quad: if isinstance(term,Variable): return False return True class _SPARQLNode(object): """ The SPARQL implementation is based on the creation of a tree, each level for each statement in the 'where' clause of SPARQL. Each node maintains a 'binding' dictionary, with the variable names and either a None if not yet bound, or the binding itself. The method 'expand' tries to make one more step of binding by looking at the next statement: it takes the statement of the current node, binds the variables if there is already a binding, and looks at the triple store for the possibilities. 
    def __init__(self,parent,bindings,statements,tripleStore,expr=None) :
        """
        @param parent: parent node (None for a root)
        @param bindings: a dictionary with the bindings that are already
            done or with None value if no binding yet
        @param statements: array of statements from the 'where' clause.
            The first element is for the current node, the rest for the
            children. If empty, then no expansion occurs (ie, the node is
            a leaf)
        @param tripleStore: the 'owner' triple store
        @type tripleStore: L{sparqlGraph}
        @param expr: the expression this node evaluates (optional)
        """
        self.priorLeftJoin = False
        self.expr = expr
        self.tripleStore = tripleStore
        self.bindings = bindings
        self.optionalTrees = []
        self.dontSpawn = False
        # 'bound' means every query variable already has a (non-None) value
        if None in bindings.values() :
            self.bound = False
        else :
            self.bound = True
        self.clash = False
        self.parent = parent
        self.children = []
        # split off the statement this node handles; the remainder is
        # handed down to the children created during expand()
        if len(statements) > 0 :
            self.statement = statements[0]
            self.rest = statements[1:]
        else :
            # leaf node: nothing left to expand
            self.statement = None
            self.rest = None
the tree. The variables in the select are exchanged against their bound equivalent (if applicable). This action is done on the valid leaf nodes only, the intermediate nodes only gather the children's results and combine it in one array. @param select: the array of unbound variables in the original select that do not appear in any of the optionals. If None, the full binding should be considered (this is the case for the SELECT * feature of SPARQL) @returns: an array of dictionaries with non-None bindings. """ if len(self.children) > 0 : # combine all the results of all the kids into one array retval = [] for c in self.children : res = c.returnResult(select) # res is a list of dictionaries, so each tuple should be taken out and added to the result for t in res : retval.append(t) return retval else : retval = [] if self.bound == True and self.clash == False : # This node should be able to contribute to the final results # if it doesn't have any OPTIONAL proxies: result = {} #Determine if this node has an OPTIONAL 'proxy' proxies = [] if self.optionalTrees: proxies = reduce(lambda x,y: x+y,[list(_fetchBoundLeaves(o)) for o in self.optionalTrees],[]) # This where the essential happens: the binding values are used to construct the selection result # sparql-p fix: A node with valid optional expansion trees should not # contribute to bindings (the OPTIONAL expansion trees already account # for it's bindings) # see: http://chatlogs.planetrdf.com/swig/2007-06-07.html#T19-28-43 if not proxies: prevBound=reduce(lambda x,y: x+y, [list(_fetchBoundLeaves(o,previousBind=True)) for o in self.optionalTrees],[]) if self.optionalTrees and \ reduce(lambda x,y: x+y, [list(_fetchBoundLeaves(o,previousBind=True)) for o in self.optionalTrees],[]): pass elif select : for a in select : if a in self.bindings : result[a] = self.bindings[a] else : result = self.bindings.copy() # Initial return block. 
If there is no optional processing, that is the result, in fact, # because the for cycle below will not happen retval = [result] else: retval = reduce(lambda x,y: x+y,[o.returnResult(select) for o in proxies]) return retval def expandSubgraph(self,subTriples,pattern) : """ Method used to collect the results. There are two ways to invoke the method: - if the pattern argument is not None, then this means the construction of a separate triple store with the results. This means taking the bindings in the node, and constuct the graph via the L{construct} method. This happens on the valid leafs; intermediate nodes call the same method recursively - otherwise, a leaf returns an array of the bindings, and intermediate methods aggregate those. In both cases, leaf nodes may successifely expand the optional trees that they may have. @param subTriples: the triples so far @type subTriples: L{sparqlGraph} @param pattern: a graph pattern used to construct a graph @type pattern: L{GraphPattern} @return: if pattern is not None, an array of binding dictionaries """ def b(r,bind) : if type(r) == str : val = bind[r] if val == None : raise RuntimeError() return bind[r] else : return r if len(self.children) > 0 : # all children return an array of bindings (each element being a dictionary) if pattern == None : retval = reduce(lambda x,y: x+y, [x.expandSubgraph(subTriples,None) for x in self.children],[]) (s,p,o,func) = self.statement for bind in retval : try : st = (b(s,bind),b(p,bind),b(o,bind)) subTriples.add(st) except : # any exception means a None value creeping in, or something similar.. pass return retval else : for x in self.children : x.expandSubgraph(subTriples,pattern) else : # return the local bindings if any. Not the optional trees should be added, too! 
    def _bind(self,r) :
        """
        Resolve one term of a triple pattern against this node's bindings.

        @param r: a query variable (plain string or Variable) or an RDF term
        @return: returns None if no bindings occured yet, the binding
            otherwise; ground terms are returned unchanged
        """
        # Plain strings (that are not rdflib Identifiers) and Variables
        # are query variables: look them up in the bindings dictionary.
        if isinstance(r,basestring) and not isinstance(r,Identifier) or \
           isinstance(r,Variable) :
            if self.bindings[r] == None :
                return None
            else :
                return self.bindings[r]
        # SessionBNode subclasses BNode, so it must be tested first: a
        # session BNode refers to a concrete BNode in persistence and is
        # passed through unchanged rather than treated as a variable.
        elif isinstance(r,(SessionBNode)):
            return r
        # Ordinary BNodes act as scoped variables: return their binding,
        # or None when unbound (dict.get default).
        elif isinstance(r,(BNode)):
            return self.bindings.get(r)
        else :
            # ground term (URIRef, Literal, ...): nothing to resolve
            return r
queried graphName = self.tripleStore.graph.identifier patterns.append((search_s,search_p,search_o,graphName)) #expand at server, accumulating results rt=[] nonGroundPatterns=[pattern for pattern in patterns if not isGroundQuad(pattern)] if nonGroundPatterns: #Only evaluate at the server if not all the terms are ground for rtDict in self.tripleStore.graph.store.batch_unify(patterns): if self.tripleStore.graphVariable: if self.tripleStore.DAWG_DATASET_COMPLIANCE and \ isinstance(rtDict[self.tripleStore.graphVariable],BNode): #We can't match the default graph when the active #graph is set via the GRAPH expression continue rt.append(rtDict) # create a copy of the current bindings, by also adding the new ones from result of the search new_bindings = self.bindings.copy() new_bindings.update(rtDict) child = _SPARQLNode(self,new_bindings,[],self.tripleStore,expr=self.expr) self.children.append(child) assert not child.clash and child.bindings for func in constraints : try: if func(new_bindings) == False : child.clash = True break except TypeError: child.clash=True else: #If all the patterns are ground, there is no need #to invoke server-side unification (no variables to batch unify) self.expandAtClient(constraints) return if self.statement: if nonGroundPatterns and len(self.children) == 0: self.clash = True else: for func in constraints : try: if func(self.bindings) == False : self.clash = True break except TypeError: self.clash=True else: self.expandAtClient(constraints) def expandAtClient(self,constraints) : """ The expansion itself. See class comments for details. @param constraints: array of global constraining (filter) methods """ # if there are no more statements, that means that the constraints have been fully expanded if self.statement : # decompose the statement into subject, predicate and object # default setting for the search statement # see if subject (resp. predicate and object) is already bound. 
This # is done by taking over the content of self.dict if not None and replacing # the subject with that binding # the (search_subject,search_predicate,search_object) is then created (s,p,o,func) = self.statement # put the bindings we have so far into the statement; this may add None values, # but that is exactly what RDFLib uses in its own search methods! (search_s,search_p,search_o) = (self._bind(s),self._bind(p),self._bind(o)) #We need to keep track of the original Graph associated with the tripleStore #so we can switch back to it after changing the active graph (if we do) #otherwise we will effect later evaluations which use the same tripleStore instance originalGraph = None if self.tripleStore.graphVariable: if hasattr(self.tripleStore.graph,'quads'): if self.tripleStore.graphVariable not in self.bindings: searchRT = self.tripleStore.graph.quads((search_s, search_p, search_o)) else: graphName = self.bindings[self.tripleStore.graphVariable] assert not self.tripleStore.DAWG_DATASET_COMPLIANCE or\ isinstance(graphName,URIRef),\ "Cannot formally return graph name solutions for the default graph!" 
unifiedGraph = Graph(self.tripleStore.graph.store, identifier=graphName) originalGraph = self.tripleStore.graph self.tripleStore.graph = unifiedGraph searchRT = [(_s,_p,_o,unifiedGraph) for _s,_p,_o in \ unifiedGraph.triples((search_s,search_p,search_o))] else: assert not self.tripleStore.DAWG_DATASET_COMPLIANCE or\ isinstance(self.tripleStore.graph.identifier,URIRef),\ "Cannot formally return graph name solutions for the default graph" searchRT = [(_s,_p,_o,self.tripleStore.graph) for _s,_p,_o in self.tripleStore.graph.triples((search_s,search_p,search_o))] elif self.tripleStore.DAWG_DATASET_COMPLIANCE and \ isinstance(self.tripleStore.graph,ConjunctiveGraph): #For query-constructed datasets, match against the 'default graph' - #the first Graph with a non-URIRef identifier (or an empty, default graph) if isinstance(self.tripleStore.graph,ReadOnlyGraphAggregate): for g in self.tripleStore.graph.graphs: searchRT = [] if isinstance(g.identifier,BNode): searchRT = g.triples((search_s,search_p,search_o)) break else: #match against the default graph searchRT = self.tripleStore.graph.default_context.triples( (search_s, search_p, search_o)) else: #otherwise, the default graph is the graph queried searchRT = self.tripleStore.graph.triples((search_s,search_p,search_o)) if originalGraph: self.tripleStore.graph = originalGraph for tripleOrQuad in searchRT: if self.tripleStore.graphVariable: (result_s,result_p,result_o,parentGraph) = tripleOrQuad if isinstance(self.tripleStore.graph,ConjunctiveGraph) and \ self.tripleStore.DAWG_DATASET_COMPLIANCE and isinstance(parentGraph.identifier,BNode): continue assert isinstance(parentGraph.identifier,URIRef) else: (result_s,result_p,result_o) = tripleOrQuad # if a user defined constraint has been added, it should be checked now if func != None and func(result_s,result_p,result_o) == False : # Oops, this result is not acceptable, jump over it! 
                    continue
                # create a copy of the current bindings, by also adding the new ones from result of the search
                new_bindings = self.bindings.copy()
                queryTerms = [s,p,o]  # NOTE(review): unused local, kept as-is
                preClash = False
                for searchSlot,searchTerm,result in [(search_s,s,result_s),
                                                     (search_p,p,result_p),
                                                     (search_o,o,result_o)]:
                    #searchSlot is what we searched with (variables become none)
                    #searchTerm is the term in the triple pattern
                    #result is the unified term from the dataset
                    if searchSlot == None :
                        #An unknown: check against an earlier binding of the same
                        #variable; a mismatch means this search result cannot unify
                        currBound = new_bindings.get(searchTerm)
                        if currBound is not None:
                            if currBound != result:
                                preClash = True
                        else:
                            new_bindings[searchTerm] = result
                if self.tripleStore.graphVariable:
                    new_bindings[self.tripleStore.graphVariable] = parentGraph.identifier
                # Recursion starts here: create and expand a new child
                child = _SPARQLNode(self,new_bindings,self.rest,self.tripleStore,expr=self.expr)
                if preClash:
                    child.clash = True
                else:
                    child.expand(constraints)
                # if the child is a clash then no use adding it to the tree, it can be forgotten
                # NOTE(review): the guard below tests self.clash, not child.clash, although
                # the comment above speaks of the child -- confirm against upstream before
                # changing anything here
                if self.clash == False :
                    self.children.append(child)
            if len(self.children) == 0 :
                # this means that the constraints could not be met at all with this binding!!!!
                self.clash = True
        else :
            # this is if all bindings are done; the conditions (ie, global constraints) are still to be checked
            if self.bound == True and self.clash == False :
                for func in constraints :
                    try:
                        if func(self.bindings) == False :
                            self.clash = True
                            break
                    except TypeError:
                        # a constraint that cannot cope with this binding shape
                        # clashes the node as well
                        self.clash = True

    def expandOptions(self,bindings,statements,constraints) :
        """
        Managing optional statements. These affect leaf nodes only, if
        they contain 'real' results. A separate Expansion tree is
        appended to such a node, one for each optional call.

        @param bindings: current bindings dictionary

        @param statements: array of statements from the 'where' clause.
        The first element is for the current node, the rest for the
        children. If empty, then no expansion occurs (ie, the node is a
        leaf). The bindings at this node are taken into account
        (replacing the unbound variables with the real resources)
        before expansion

        @param constraints: array of constraint (filter) methods
        """
        def replace(key,resource,tupl) :
            # substitute 'resource' for 'key' wherever it occurs in the triple pattern
            s,p,o,func = tupl
            if key == s : s = resource
            if key == p : p = resource
            if key == o : o = resource
            return (s,p,o,func)

        if len(self.children) == 0 :
            # this is a leaf in the original expansion
            if self.bound == True and self.clash == False :
                # see if the optional bindings can be reduced because they are already
                # bound by this node
                toldBNodeLookup = {}
                for key in self.bindings :
                    normalizedStatements = []
                    for t in statements:
                        val = self.bindings[key]
                        # told BNodes must survive into the optional expansion
                        if isinstance(val,BNode) and val not in toldBNodeLookup:
                            toldBNodeLookup[val] = val
                        normalizedStatements.append(replace(key,self.bindings[key],t))
                    statements = normalizedStatements
                    if key in bindings :
                        del bindings[key]
                bindings.update(toldBNodeLookup)
                optTree = _SPARQLNode(None,bindings,statements,self.tripleStore,expr=self.expr)
                self.optionalTrees.append(optTree)
                optTree.expand(constraints)
        else :
            for c in self.children :
                c.expandOptions(bindings,statements,constraints)

def _processResults(select,arr) :
    '''
    The result in an expansion node is in the form of an array of
    binding dictionaries. The caller should receive an array of tuples,
    each tuple representing the final binding (or None) I{in the order
    of the original select}. This method is the last step of processing
    by processing these values to produce the right result.

    @param select: the original selection list.
If None, then the binding should be taken as a whole (this corresponds to the SELECT * feature of SPARQL) @param arr: the array of bindings @type arr: an array of dictionaries @return: a list of tuples with the selection results ''' retval = [] if select : for bind in arr : # each result binding must be taken separately qresult = [] for s in select : if s in bind : qresult.append(bind[s]) else : qresult.append(None) # as a courtesy to the user, if the selection has one single element only, than we do no # put in a tuple, just add it that way: if len(select) == 1 : retval.append(qresult[0]) else : retval.append(tuple(qresult)) else : # this is the case corresponding to a SELECT * query call for bind in arr: qresult = [val for key,val in bind.items()] if len(qresult) == 1 : retval.append(qresult[0]) else : retval.append(tuple(qresult)) return retval def query(graph, selection, patterns, optionalPatterns=[], initialBindings = {}) : """ A shorthand for the creation of a L{Query} instance, returning the result of a L{Query.select} right away. Good for most of the usage, when no more action (clustering, etc) is required. @param selection: a list or tuple with the selection criteria, or a single string. Each entry is a string that begins with a"?". @param patterns: either a L{GraphPattern} instance or a list of instances thereof. Each pattern in the list represent an 'OR' (or 'UNION') branch in SPARQL. @param optionalPatterns: either a L{GraphPattern} instance or a list of instances thereof. For each elements in the 'patterns' parameter is combined with each of the optional patterns and the results are concatenated. The list may be empty. 
@return: list of query results @rtype: list of tuples """ result = queryObject(graph, patterns,optionalPatterns,initialBindings) if result == None : # generate some proper output for the exception :-) msg = "Errors in the patterns, no valid query object generated; " if isinstance(patterns,GraphPattern) : msg += ("pattern:\n%s" % patterns) else : msg += ("pattern:\n%s\netc..." % patterns[0]) raise SPARQLError(msg) return result.select(selection) def queryObject(graph, patterns, optionalPatterns=[], initialBindings = None) : """ Creation of a L{Query} instance. @param patterns: either a L{GraphPattern} instance or a list of instances thereof. Each pattern in the list represent an 'OR' (or 'UNION') branch in SPARQL. @param optionalPatterns: either a L{GraphPattern} instance or a list of instances thereof. For each elements in the 'patterns' parameter is combined with each of the optional patterns and the results are concatenated. The list may be empty. @return: Query object @rtype: L{Query} """ def checkArg(arg,error) : if arg == None : return [] elif isinstance(arg,GraphPattern) : return [arg] elif type(arg) == list or type(arg) == tuple : for p in arg : if not isinstance(p,GraphPattern) : raise SPARQLError("'%s' argument must be a GraphPattern or a list of those" % error) return arg else : raise SPARQLError("'%s' argument must be a GraphPattern or a list of those" % error) finalPatterns = checkArg(patterns,"patterns") finalOptionalPatterns = checkArg(optionalPatterns,"optionalPatterns") retval = None if not initialBindings: initialBinding = {} for pattern in finalPatterns : # Check whether the query strings in the optional clauses are fine. If a problem occurs, # an exception is raised by the function _checkOptionals(pattern,finalOptionalPatterns) bindings = _createInitialBindings(pattern) if initialBindings: bindings.update(initialBindings) # This is the crucial point: the creation of the expansion tree and the expansion. 
That # is where the real meal is, we had only an apetizer until now :-) top = _SPARQLNode(None,bindings,pattern.patterns, graph) top.expand(pattern.constraints) for opt in finalOptionalPatterns : bindings = _createInitialBindings(opt) if initialBindings: bindings.update(initialBindings) top.expandOptions(bindings,opt.patterns,opt.constraints) r = Query(top, graph) if retval == None : retval = r else : # This branch is, effectively, the UNION clause of the draft retval = retval + r return retval class Query : """ Result of a SPARQL query. It stores to the top of the query tree, and allows some subsequent inquiries on the expanded tree. B{This class should not be instantiated by the user,} it is done by the L{queryObject} method. """ def __init__(self,sparqlnode,triples,parent1=None,parent2=None) : """ @param sparqlnode: top of the expansion tree @type sparqlnode: _SPARQLNode @param triples: triple store @type triples: L{sparqlGraph} @param parent1: possible parent Query when queries are combined by summing them up @type parent1: L{Query} @param parent2: possible parent Query when queries are combined by summing them up @type parent2: L{Query} """ self.top = sparqlnode self.triples = triples # if this node is the result of a sum... self.parent1 = parent1 self.parent2 = parent2 def __add__(self,other) : """This may be useful when several queries are performed and one wants the 'union' of those. Caveat: the triple store must be the same for each argument. This method is used internally only anyway... Efficiency trick (I hope it works): the various additions on subgraphs are not done here; the results are calculated only if really necessary, ie, in a lazy evaluation manner. 
This is achieved by storing self and the 'other' in the new object """ return Query(None,self.triples,self,other) def _getFullBinding(self) : """Retrieve the full binding, ie, an array of binding dictionaries """ if self.parent1 != None and self.parent2 != None : results = self.parent1.select(None) + self.parent2.select(None) else : # remember: _processResult turns the expansion results (an array of dictionaries) # into an array of tuples in the right, original order results = self.top.returnResult(None) return results if self.parent1 != None and self.parent2 != None : return self.parent1._getFullBinding() + self.parent2._getFullBinding() else : # remember: returnResult returns an array of dictionaries return self.top.returnResult(None) def _getAllVariables(self): """Retrieve the list of all variables, to be returned""" if self.parent1 and self.parent2: return list2set(self.parent1._getAllVariables() + self.parent2._getAllVariables()) else: maxKeys = [] for bound in _fetchBoundLeaves(self.top): maxKeys.extend(bound.bindings.keys()) return list2set(maxKeys) def _orderedSelect(self,selection,orderedBy,orderDirection) : """ The variant of the selection (as below) that also includes the sorting. Because that is much less efficient, this is separated into a distinct method that is called only if necessary. It is called from the L{select} method retrieves from the result the selected bindings only). The full binding is an array of (binding) dictionaries; the sorting sorts this array by comparing the bound variables in the respective dictionaries. Once this is done, the final selection is done. @param selection: Either a single query string, or an array or tuple thereof. @param orderBy: either a function or a list of strings (corresponding to variables in the query). If None, no sorting occurs on the results. 
        If the parameter is a function, it must take two dictionary arguments (the binding
        dictionaries), return -1, 0, and 1, corresponding to smaller, equal, and greater,
        respectively.

        @param orderDirection: if not None, then an array of integers of the same
        length as orderBy, with values the constants ASC or DESC (defined in the
        module). If None, an ascending order is used.

        @return: selection results
        @rtype: list of tuples
        @raise SPARQLError: invalid sorting arguments
        """
        fullBinding = self._getFullBinding()
        if type(orderedBy) is types.FunctionType :
            # the caller supplied a ready-made cmp-style comparison function
            _sortBinding = orderedBy
        else :
            orderKeys = _variablesToArray(orderedBy,"orderBy")
            # see the direction
            oDir = None # this is just to fool the interpreter's error message
            if orderDirection is None :
                oDir = [ True for i in xrange(0,len(orderKeys)) ]
            elif type(orderDirection) is types.BooleanType :
                oDir = [ orderDirection ]
            elif type(orderDirection) is not types.ListType and type(orderDirection) is not types.TupleType :
                raise SPARQLError("'orderDirection' argument must be a list")
            elif len(orderDirection) != len(orderKeys) :
                raise SPARQLError("'orderDirection' must be of an equal length to 'orderBy'")
            else :
                oDir = orderDirection
            def _sortBinding(b1,b2) :
                """The sorting method used by the array sort, with return values as
                required by the python run-time. The to-be-compared data are
                dictionaries of bindings.
                """
                for i in xrange(0,len(orderKeys)) :
                    # each key has to be compared separately. If there is a clear
                    # comparison result on that key then we are done, but when that
                    # is not the case, the next in line should be used
                    key       = orderKeys[i]
                    direction = oDir[i]
                    if key in b1 and key in b2 :
                        val1 = b1[key]
                        val2 = b2[key]
                        if val1 != None and val2 != None :
                            if direction :
                                if   val1 < val2 : return -1
                                elif val1 > val2 : return 1
                            else :
                                if   val1 > val2 : return -1
                                elif val1 < val2 : return 1
                return 0
        # get the full Binding sorted
        fullBinding.sort(_sortBinding)
        # remember: _processResult turns the expansion results (an array of dictionaries)
        # into an array of tuples in the right, original order
        retval = _processResults(selection,fullBinding)
        return retval

    def select(self,selection,distinct=True,limit=None,orderBy=None,orderAscend=None,offset=0) :
        """
        Run a selection on the query.

        @param selection: Either a single query string, or an array or tuple thereof.

        @param distinct: if True, identical results are filtered out
        @type distinct: Boolean

        @param limit: if set to an integer value, the first 'limit' number of
        results are returned; all of them otherwise
        @type limit: non negative integer

        @param orderBy: either a function or a list of strings (corresponding to
        variables in the query). If None, no sorting occurs on the results. If the
        parameter is a function, it must take two dictionary arguments (the binding
        dictionaries), return -1, 0, and 1, corresponding to smaller, equal, and
        greater, respectively.

        @param orderAscend: if not None, then an array of booelans of the same
        length as orderBy, True for ascending and False for descending. If None, an
        ascending order is used.

        @offset the starting point of return values in the array of results.
        Obviously, this parameter makes real sense if some sort of order is defined.

        @return: selection results
        @rtype: list of tuples
        @raise SPARQLError: invalid selection argument
        """
        def _uniquefyList(lst) :
            """Return a copy of the list but possible duplicate elements are taken out.
Used to post-process the outcome of the query @param lst: input list @return: result list """ if len(lst) <= 1 : return lst else : # must be careful! Using the quick method of Sets destroy the order. Ie, if this was ordered, then # a slower but more secure method should be used if orderBy != None : retval = [] for i in xrange(0,len(lst)) : v = lst[i] skip = False for w in retval : if w == v : skip = True break if not skip : retval.append(v) return retval else : return list(sets.Set(lst)) # Select may be a single query string, or an array/tuple thereof selectionF = _variablesToArray(selection,"selection") if type(offset) is not types.IntType or offset < 0 : raise SPARQLError("'offset' argument is invalid") if limit != None : if type(limit) is not types.IntType or limit < 0 : raise SPARQLError("'offset' argument is invalid") if orderBy != None : results = self._orderedSelect(selectionF,orderBy,orderAscend) else : if self.parent1 != None and self.parent2 != None : results = self.parent1.select(selectionF) + self.parent2.select(selectionF) else : # remember: _processResult turns the expansion results (an array of dictionaries) # into an array of tuples in the right, original order results = _processResults(selectionF,self.top.returnResult(selectionF)) if distinct : retval = _uniquefyList(results) else : retval = results if limit != None : if limit == 0: return [] return retval[offset:limit+offset] elif offset > 0 : return retval[offset:] else : return retval def construct(self,pattern=None) : """ Expand the subgraph based on the pattern or, if None, the internal bindings. In the former case the binding is used to instantiate the triplets in the patterns; in the latter, the original statements are used as patterns. The result is a separate triple store containing the subgraph. 
@param pattern: a L{GraphPattern} instance or None @return: a new triple store @rtype: L{sparqlGraph} """ if self.parent1 != None and self.parent2 != None : return self.parent1.construct(pattern) + self.parent2.construct(pattern) else : subgraph = SPARQLGraph() self.top.expandSubgraph(subgraph,pattern) return subgraph def ask(self) : """ Whether a specific pattern has a solution or not. @rtype: Boolean """ return len(self.select('*')) != 0 ######################################################################################################### # The methods below are not really part of SPARQL, or may be used to a form of DESCRIBE. However, that latter # is still in a flux in the draft, so we leave it here, pending def clusterForward(self,selection) : """ Forward clustering, using all the results of the query as seeds (when appropriate). It is based on the usage of the L{cluster forward} method for triple store. @param selection: a selection to define the seeds for clustering via the selection; the result of select used for the clustering seed @return: a new triple store @rtype: L{sparqlGraph} """ if self.parent1 != None and self.parent2 != None : return self.parent1.clusterForward(selection) + self.parent2.clusterForward(selection) else : clusterF = SPARQLGraph() for r in reduce(lambda x,y: list(x) + list(y),self.select(selection),()) : try : check_subject(r) self.triples.clusterForward(r,clusterF) except : # no real problem, this is a literal, just forget about it continue return clusterF def clusterBackward(self,selection) : """ Backward clustering, using all the results of the query as seeds (when appropriate). It is based on the usage of the L{cluster backward} method for triple store. 
        @param selection: a selection to define the seeds for clustering
        via the selection; the result of select used for the clustering seed
        @return: a new triple store
        @rtype: L{sparqlGraph}
        """
        if self.parent1 != None and self.parent2 != None :
            return self.parent1.clusterBackward(selection) + self.parent2.clusterBackward(selection)
        else :
            clusterB = SPARQLGraph()
            # to be on the safe side, see if the query has been properly finished
            for r in reduce(lambda x,y: list(x) + list(y),self.select(selection),()) :
                self.triples.clusterBackward(r,clusterB)
            return clusterB

    def cluster(self,selection) :
        """
        Cluster: a combination of L{Query.clusterBackward} and
        L{Query.clusterForward}.

        @param selection: a selection to define the seeds for clustering
        via the selection; the result of select used for the clustering seed
        """
        return self.clusterBackward(selection) + self.clusterForward(selection)

    def describe(self,selection,forward=True,backward=True) :
        """
        The DESCRIBE Form in the SPARQL draft is still in state of flux,
        so this is just a temporary method, in fact. It may not correspond
        to what the final version of describe will be (if it stays in the
        draft at all, that is). At present, it is simply a wrapper around
        L{cluster}.

        @param selection: a selection to define the seeds for clustering
        via the selection; the result of select used for the clustering seed
        @param forward: cluster forward yes or no
        @type forward: Boolean
        @param backward: cluster backward yes or no
        @type backward: Boolean
        """
        if forward and backward :
            return self.cluster(selection)
        elif forward :
            return self.clusterForward(selection)
        elif backward :
            return self.clusterBackward(selection)
        else :
            return SPARQLGraph()
rdflib-2.4.2/rdflib/sparql/Unbound.py0000644000175000017500000000135211153616034016454 0ustar  nachonacho
from rdflib.sparql import _questChar

class Unbound :
    """A class to encapsulate a query variable.
    This class should be used in conjunction with L{BasicGraphPattern}."""
    def __init__(self,name) :
        """
        @param name: the name of the variable (without the '?' character)
        @type name: unicode or string
        """
        if isinstance(name,basestring) :
            # internal name carries the leading question-mark character
            self.name     = _questChar + name
            self.origName = name
        else :
            raise SPARQLError("illegal argument, variable name must be a string or unicode")

    def __repr__(self) :
        retval  = "?%s" % self.origName
        return retval

    def __str__(self) :
        return self.__repr__()
rdflib-2.4.2/rdflib/sparql/Algebra.py0000644000175000017500000015234111153616034016404 0ustar  nachonacho
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
"""
An implementation of the W3C SPARQL Algebra on top of sparql-p's expansion trees

See: http://www.w3.org/TR/rdf-sparql-query/#sparqlAlgebra

For each symbol in a SPARQL abstract query, we define an operator for
evaluation. The SPARQL algebra operators of the same name are used to evaluate
SPARQL abstract query nodes as described in the section "Evaluation Semantics".

We define eval(D(G), graph pattern) as the evaluation of a graph pattern with
respect to a dataset D having active graph G. The active graph is initially
the default graph.
""" import unittest, os from StringIO import StringIO from rdflib.Graph import Graph, ReadOnlyGraphAggregate, ConjunctiveGraph from rdflib import URIRef, Variable, plugin, BNode, Literal from rdflib.util import first from rdflib.store import Store from rdflib.sparql.bison.Query import AskQuery, SelectQuery, DescribeQuery, Query, Prolog from rdflib.sparql.bison.IRIRef import NamedGraph,RemoteGraph from rdflib.sparql.bison.SolutionModifier import ASCENDING_ORDER from rdflib.sparql import sparqlGraph, sparqlOperators, SPARQLError, Query, DESCRIBE from rdflib.sparql.bison.SPARQLEvaluate import unRollTripleItems, _variablesToArray from rdflib.sparql.bison.GraphPattern import * from rdflib.sparql.graphPattern import BasicGraphPattern from rdflib.sparql.bison.Triples import ParsedConstrainedTriples from rdflib.sparql.bison.SPARQLEvaluate import createSPARQLPConstraint,\ CONSTRUCT_NOT_SUPPORTED,convertTerm #A variable to determine whether we obey SPARQL definition of RDF dataset #which does not allow matching of default graphs (or any graph with a BNode for a name) #"An RDF Dataset comprises one graph, # the default graph, which does not have a name" - # http://www.w3.org/TR/rdf-sparql-query/#namedAndDefaultGraph DAWG_DATASET_COMPLIANCE = False def ReduceGraphPattern(graphPattern,prolog): """ Takes parsed graph pattern and converts it into a BGP operator .. Replace all basic graph patterns by BGP(list of triple patterns) .. 
""" if isinstance(graphPattern.triples[0],list) and len(graphPattern.triples) == 1: graphPattern.triples = graphPattern.triples[0] items = [] for triple in graphPattern.triples: bgp=BasicGraphPattern(list(unRollTripleItems(triple,prolog)),prolog) items.append(bgp) if len(items) == 1: assert isinstance(items[0],BasicGraphPattern), repr(items) bgp=items[0] return bgp elif len(items) > 1: constraints=[b.constraints for b in items if b.constraints] constraints=reduce(lambda x,y:x+y,constraints,[]) def mergeBGPs(left,right): if isinstance(left,BasicGraphPattern): left = left.patterns if isinstance(right,BasicGraphPattern): right = right.patterns return left+right bgp=BasicGraphPattern(reduce(mergeBGPs,items),prolog) bgp.addConstraints(constraints) return bgp else: #an empty BGP? raise def ReduceToAlgebra(left,right): """ Converts a parsed Group Graph Pattern into an expression in the algebra by recursive folding / reduction (via functional programming) of the GGP as a list of Basic Triple Patterns or "Graph Pattern Blocks" 12.2.1 Converting Graph Patterns [20] GroupGraphPattern ::= '{' TriplesBlock? ( ( GraphPatternNotTriples | Filter ) '.'? TriplesBlock? )* '}' [22] GraphPatternNotTriples ::= OptionalGraphPattern | GroupOrUnionGraphPattern | GraphGraphPattern [26] Filter ::= 'FILTER' Constraint [27] Constraint ::= BrackettedExpression | BuiltInCall | FunctionCall [56] BrackettedExpression ::= '(' ConditionalOrExpression ')' ( GraphPatternNotTriples | Filter ) '.'? TriplesBlock? 
nonTripleGraphPattern filter triples """ if not isinstance(right,AlgebraExpression): if isinstance(right,ParsedGroupGraphPattern): right = reduce(ReduceToAlgebra,right,None) print right;raise assert isinstance(right,GraphPattern),type(right) #Parsed Graph Pattern if right.triples: if right.nonTripleGraphPattern: #left is None, just return right (a GraphPatternNotTriples) if isinstance(right.nonTripleGraphPattern,ParsedGraphGraphPattern): right = Join(ReduceGraphPattern(right,prolog), GraphExpression( right.nonTripleGraphPattern.name, reduce(ReduceToAlgebra, right.nonTripleGraphPattern.graphPatterns, None))) elif isinstance(right.nonTripleGraphPattern, ParsedOptionalGraphPattern): # Join(LeftJoin( ..left.. ,{..}),..triples..) if left: assert isinstance(left,(Join,BasicGraphPattern)),repr(left) rightTriples = ReduceGraphPattern(right,prolog) LJright = LeftJoin(left, reduce(ReduceToAlgebra, right.nonTripleGraphPattern.graphPatterns, None)) return Join(LJright,rightTriples) else: # LeftJoin({},right) => {} #see http://lists.w3.org/Archives/Public/public-rdf-dawg/2007AprJun/0046.html return EmptyGraphPatternExpression() elif isinstance(right.nonTripleGraphPattern, ParsedAlternativeGraphPattern): #Join(Union(..),..triples..) unionList =\ [ reduce(ReduceToAlgebra,i.graphPatterns,None) for i in right.nonTripleGraphPattern.alternativePatterns ] right = Join(reduce(Union,unionList), ReduceGraphPattern(right,prolog)) else: raise Exception(right) else: if isinstance(left,BasicGraphPattern) and left.constraints: if right.filter: if not left.patterns: #{ } FILTER E1 FILTER E2 BGP(..) filter2=createSPARQLPConstraint(right.filter,prolog) right = ReduceGraphPattern(right,prolog) right.addConstraints(left.constraints) right.addConstraint(filter2) return right else: #BGP(..) FILTER E1 FILTER E2 BGP(..) left.addConstraint(createSPARQLPConstraint(right.filter, prolog)) right = ReduceGraphPattern(right,prolog) else: if right.filter: #FILTER ... 
filter=createSPARQLPConstraint(right.filter,prolog) right = ReduceGraphPattern(right,prolog) right.addConstraint(filter) else: #BGP(..) right = ReduceGraphPattern(right,prolog) else: #right.triples is None if right.nonTripleGraphPattern is None: if right.filter: if isinstance(left,BasicGraphPattern): #BGP(...) FILTER left.addConstraint(createSPARQLPConstraint(right.filter, prolog)) return left else: pattern=BasicGraphPattern() pattern.addConstraint(createSPARQLPConstraint(right.filter, prolog)) if left is None: return pattern else: right=pattern else: raise Exception(right) elif right.nonTripleGraphPattern: if isinstance(right.nonTripleGraphPattern,ParsedGraphGraphPattern): # Join(left,Graph(...)) right = GraphExpression(right.nonTripleGraphPattern.name, reduce(ReduceToAlgebra, right.nonTripleGraphPattern.graphPatterns, None)) elif isinstance(right.nonTripleGraphPattern,ParsedOptionalGraphPattern): if left: # LeftJoin(left,right) return LeftJoin(left, reduce(ReduceToAlgebra, right.nonTripleGraphPattern.graphPatterns, None)) else: # LeftJoin({},right) #see - http://lists.w3.org/Archives/Public/public-rdf-dawg/2007AprJun/0046.html return EmptyGraphPatternExpression() elif isinstance(right.nonTripleGraphPattern, ParsedAlternativeGraphPattern): #right = Union(..) 
unionList =\ map(lambda i: reduce(ReduceToAlgebra,i.graphPatterns,None), right.nonTripleGraphPattern.alternativePatterns) right = reduce(Union,unionList) else: raise Exception(right) if not left: return right else: return Join(left,right) def RenderSPARQLAlgebra(parsedSPARQL,nsMappings=None): nsMappings = nsMappings and nsMappings or {} global prolog prolog = parsedSPARQL.prolog if prolog is not None: prolog.DEBUG = False else: prolog = Prolog(None, []) prolog.DEBUG=False return reduce(ReduceToAlgebra, parsedSPARQL.query.whereClause.parsedGraphPattern.graphPatterns,None) def LoadGraph(dtSet,dataSetBase,graph): #An RDF URI dereference, following TAG best practices #Need a hook (4Suite) to bypass urllib's inability #to implement URI RFC verbatim - problematic for descendent #specifications try: from Ft.Lib.Uri import UriResolverBase as Resolver from Ft.Lib.Uri import GetScheme, OsPathToUri except: def OsPathToUri(path): return path def GetScheme(uri): return None class Resolver: supportedSchemas=[None] def resolve(self, uriRef, baseUri): return uriRef if dataSetBase is not None: res = Resolver() scheme = GetScheme(dtSet) or GetScheme(dataSetBase) if scheme not in res.supportedSchemes: dataSetBase = OsPathToUri(dataSetBase) source=Resolver().resolve(str(dtSet), dataSetBase) else: source = dtSet #GRDDL hook here! try: #Try as RDF/XML first (without resolving) graph.parse(source) except: try: #Parse as Notation 3 instead source=Resolver().resolve(str(dtSet), dataSetBase) graph.parse(source,format='n3') except: raise #RDFa? graph.parse(dtSet,format='rdfa') def TopEvaluate(query,dataset,passedBindings = None,DEBUG=False,exportTree=False, dataSetBase=None, extensionFunctions={}): """ The outcome of executing a SPARQL is defined by a series of steps, starting from the SPARQL query as a string, turning that string into an abstract syntax form, then turning the abstract syntax into a SPARQL abstract query comprising operators from the SPARQL algebra. 
This abstract query is then evaluated on an RDF dataset. """ if not passedBindings: passedBindings = {} global prolog if query.prolog: query.prolog.DEBUG = DEBUG prolog = query.prolog prolog.extensionFunctions.update(extensionFunctions) if query.query.dataSets: graphs = [] for dtSet in query.query.dataSets: if isinstance(dtSet,NamedGraph): newGraph = Graph(dataset.store,dtSet) LoadGraph(dtSet,dataSetBase,newGraph) graphs.append(newGraph) else: #"Each FROM clause contains an IRI that indicates a graph to be # used to form the default graph. This does not put the graph # in as a named graph." -- 8.2.1 Specifying the Default Graph if DAWG_DATASET_COMPLIANCE: #@@ this should indicate a merge into the 'default' graph # per http://www.w3.org/TR/rdf-sparql-query/#unnamedGraph # (8.2.1 Specifying the Default Graph) assert isinstance(dataset,ConjunctiveGraph) memGraph = dataset.default_context else: memStore = plugin.get('IOMemory',Store)() memGraph = Graph(memStore) LoadGraph(dtSet,dataSetBase,memGraph) if memGraph.identifier not in [g.identifier for g in graphs]: graphs.append(memGraph) tripleStore = sparqlGraph.SPARQLGraph(ReadOnlyGraphAggregate(graphs, store=dataset.store), dSCompliance=DAWG_DATASET_COMPLIANCE) else: tripleStore = sparqlGraph.SPARQLGraph(dataset, dSCompliance=DAWG_DATASET_COMPLIANCE) if isinstance(query.query,SelectQuery) and query.query.variables: query.query.variables = [convertTerm(item,query.prolog) for item in query.query.variables] else: query.query.variables = [] expr = reduce(ReduceToAlgebra,query.query.whereClause.parsedGraphPattern.graphPatterns, None) if isinstance(expr,BasicGraphPattern): retval = None bindings = Query._createInitialBindings(expr) if passedBindings: bindings.update(passedBindings) top = Query._SPARQLNode(None,bindings,expr.patterns, tripleStore,expr=expr) top.expand(expr.constraints) # for tree in Query._fetchBoundLeaves(top): # print_tree(tree) # print "---------------" result = Query.Query(top, tripleStore) else: assert 
isinstance(expr,AlgebraExpression), repr(expr) if DEBUG: print "## Full SPARQL Algebra expression ##" print expr print "###################################" result = expr.evaluate(tripleStore,passedBindings,query.prolog) if isinstance(result,BasicGraphPattern): retval = None bindings = Query._createInitialBindings(result) if passedBindings: bindings.update(passedBindings) top = Query._SPARQLNode(None,bindings,result.patterns, result.tripleStore,expr=result) top.expand(result.constraints) result = Query.Query(top, tripleStore) assert isinstance(result,Query.Query),repr(result) if exportTree: from rdflib.sparql.Visualization import ExportExpansionNode if result.top: ExportExpansionNode(result.top,fname='out.svg',verbose=True) else: ExportExpansionNode(result.parent1.top,fname='out1.svg',verbose=True) ExportExpansionNode(result.parent2.top,fname='out2.svg',verbose=True) if result == None : # generate some proper output for the exception :-) msg = "Errors in the patterns, no valid query object generated; " msg += ("pattern:\n%s\netc..." % basicPatterns[0]) raise SPARQLError(msg) if isinstance(query.query,AskQuery): return result.ask() elif isinstance(query.query,SelectQuery): orderBy = None orderAsc = None if query.query.solutionModifier.orderClause: orderBy = [] orderAsc = [] for orderCond in query.query.solutionModifier.orderClause: # is it a variable? 
if isinstance(orderCond,Variable): orderBy.append(orderCond) orderAsc.append(ASCENDING_ORDER) # is it another expression, only variables are supported else: expr = orderCond.expression assert isinstance(expr,Variable),\ "Support for ORDER BY with anything other than a variable is not supported: %s"%expr orderBy.append(expr) orderAsc.append(orderCond.order == ASCENDING_ORDER) if query.query.solutionModifier.limitClause is not None: limit = int(query.query.solutionModifier.limitClause) else: limit = None if query.query.solutionModifier.offsetClause is not None: offset = int(query.query.solutionModifier.offsetClause) else: offset = 0 topUnionBindings=[] selection=result.select(query.query.variables, query.query.distinct, limit, orderBy, orderAsc, offset ) selectionF = Query._variablesToArray(query.query.variables,"selection") if result.parent1 != None and result.parent2 != None : topUnionBindings=reduce(lambda x,y:x+y, [root.returnResult(selectionF) \ for root in fetchUnionBranchesRoots(result)]) else: if (limit == 0 or limit is not None or offset is not None and \ offset > 0): topUnionBindings=[] else: topUnionBindings=result.top.returnResult(selectionF) return selection,\ _variablesToArray(query.query.variables,"selection"),\ result._getAllVariables(),\ orderBy,query.query.distinct,\ topUnionBindings elif isinstance(query.query,DescribeQuery): if query.query.solutionModifier.limitClause is not None: limit = int(query.query.solutionModifier.limitClause) else: limit = None if query.query.solutionModifier.offsetClause is not None: offset = int(query.query.solutionModifier.offsetClause) else: offset = 0 if result.parent1 != None and result.parent2 != None : rt=(r for r in reduce(lambda x,y:x+y, [root.returnResult(selectionF) \ for root in fetchUnionBranchesRoots(result)])) elif limit is not None or offset != 0: raise NotImplemented("Solution modifiers cannot be used with DESCRIBE") else: rt=result.top.returnResult(None) rtGraph=Graph() for binding in rt: 
# (continuation of TopEvaluate: body of the DESCRIBE result loop)
            g=extensionFunctions[DESCRIBE](query.query.describeVars,
                                           binding,
                                           tripleStore.graph)
        # NOTE(review): 'g' is whatever the *last* loop iteration produced;
        # if 'rt' is empty this raises NameError.  Presumably the DESCRIBE
        # extension function accumulates into one graph -- confirm.
        return g
    else:
        # 10.2 CONSTRUCT
        # The CONSTRUCT query form returns a single RDF graph specified by a graph
        # template. The result is an RDF graph formed by taking each query solution
        # in the solution sequence, substituting for the variables in the graph
        # template, and combining the triples into a single RDF graph by set union.
        if query.query.solutionModifier.limitClause is not None:
            limit = int(query.query.solutionModifier.limitClause)
        else:
            limit = None
        if query.query.solutionModifier.offsetClause is not None:
            offset = int(query.query.solutionModifier.offsetClause)
        else:
            offset = 0
        if result.parent1 != None and result.parent2 != None :
            # UNION result: concatenate the solutions of every branch root.
            # NOTE(review): 'selectionF' is only bound in the SELECT branch
            # above -- this generator would raise NameError when iterated.
            rt=(r for r in reduce(lambda x,y:x+y,
                                  [root.returnResult(selectionF) \
                                       for root in fetchUnionBranchesRoots(result)]))
        elif limit is not None or offset != 0:
            # NOTE(review): NotImplemented is a constant, not an exception
            # class -- this should almost certainly be NotImplementedError.
            raise NotImplemented("Solution modifiers cannot be used with CONSTRUCT")
        else:
            rt=result.top.returnResult(None)
        rtGraph=Graph()
        for binding in rt:
            for s,p,o,func in ReduceGraphPattern(query.query.triples,prolog).patterns:
                # Substitute bound variables from the solution into the template.
                s,p,o=map(lambda x:isinstance(x,Variable) and binding.get(x) or x,[s,p,o])
                #If any such instantiation produces a triple containing an unbound
                #variable or an illegal RDF construct, such as a literal in subject
                #or predicate position, then that triple is not included in the
                #output RDF graph.
                if not [i for i in [s,p,o] if isinstance(i,Variable)]:
                    rtGraph.add((s,p,o))
        return rtGraph

class AlgebraExpression(object):
    """
    For each symbol in a SPARQL abstract query, we define an operator for
    evaluation. The SPARQL algebra operators of the same name are used
    to evaluate SPARQL abstract query nodes as described in the section
    "Evaluation Semantics".
    """
    def __repr__(self):
        # Subclasses are binary operators storing their operands in
        # self.left / self.right.
        return "%s(%s,%s)"%(self.__class__.__name__,self.left,self.right)

    def evaluate(self,tripleStore,initialBindings,prolog):
        """
        12.5 Evaluation Semantics

        We define eval(D(G), graph pattern) as the evaluation of a graph
        pattern with respect to a dataset D having active graph G. The
        active graph is initially the default graph.

        Abstract: concrete operators (Join, LeftJoin, Union, ...) override
        this; calling it on the base class is always an error.
        """
        raise Exception(repr(self))

class EmptyGraphPatternExpression(AlgebraExpression):
    """
    A placeholder for evaluating empty graph patterns - which
    should result in an empty multiset of solution bindings
    """
    def __repr__(self):
        return "EmptyGraphPatternExpression(..)"

    def evaluate(self,tripleStore,initialBindings,prolog):
        # Returns a Query whose single node is explicitly unbound, i.e. an
        # empty solution multiset.
        #raise NotImplementedError("Empty Graph Pattern expressions, not supported")
        if prolog.DEBUG:
            print "eval(%s,%s,%s)"%(self,initialBindings,tripleStore.graph)
        empty = Query._SPARQLNode(None,{},[],tripleStore)
        empty.bound = False
        return Query.Query(empty, tripleStore)

def fetchUnionBranchesRoots(node):
    """
    Yield the root _SPARQLNode of every branch of a (possibly nested)
    UNION evaluation tree (a Query with parent1/parent2 set).
    """
    for parent in [node.parent1,node.parent2]:
        if parent.parent1:
            # The parent is itself a UNION: recurse into its branches.
            for branch_root in fetchUnionBranchesRoots(parent):
                yield branch_root
        else:
            yield parent.top

def fetchChildren(node):
    """
    Yield the child lists of an expansion node: directly for a
    _SPARQLNode, via top for a non-UNION Query, or from both parents
    for a UNION Query.
    """
    if isinstance(node,Query._SPARQLNode):
        yield [c for c in node.children]
    elif isinstance(node,Query.Query):
        if node.parent1 is None:
            for c in fetchChildren(node.top):
                yield c
        else:
            for parent in [node.parent1,node.parent2]:
                for c in fetchChildren(parent):
                    yield c

def walktree(top, depthfirst = True, leavesOnly = True, optProxies=False):
    """
    Generator over the nodes (by default only the leaves) of an expansion
    tree.  Bound leaves of a node's OPTIONAL expansion trees are yielded
    as proxies in place of the node itself, when present.
    """
    #assert top.parent1 is None
    if isinstance(top,Query._SPARQLNode) and top.clash:
        return
    if not depthfirst and (not leavesOnly or not top.children):
        proxies=False
        for optChild in reduce(lambda x,y: x+y,
                               [list(Query._fetchBoundLeaves(o))
                                    for o in top.optionalTrees],[]):
            proxies=True
            yield optChild
        if not proxies:
            yield top
    children=reduce(lambda x,y:x+y,list(fetchChildren(top)))
#    if isinstance(top,Query._SPARQLNode) or isinstance(top,Query.Query) and \
#       top.parent1 is None:
#        children = top.children
#
#    else:
#        children = top.parent1.children + top.parent2.children
    for child in children:
        if child.children:
            # Interior node: recurse.
            for newtop in walktree(child, depthfirst,leavesOnly,optProxies):
                yield newtop
        else:
            # Leaf: prefer bound OPTIONAL-expansion leaves as proxies.
            proxies=False
            for optChild in reduce(lambda x,y: x+y,
                                   [list(Query._fetchBoundLeaves(o))
                                        for o in child.optionalTrees],[]):
                proxies=True
                yield optChild
            if not proxies:
                yield child
    if depthfirst and (not leavesOnly or not children):
        proxies=False
        for optChild in reduce(lambda x,y: x+y,
                               [list(Query._fetchBoundLeaves(o))
                                    for o in top.optionalTrees],[]):
            proxies=True
            yield optChild
        if not proxies:
            yield top

def print_tree(node, padding=' '):
    """
    Debugging aid: pretty-print an expansion tree; '|'/'+-' mark ordinary
    children, '||'/'+=' mark OPTIONAL expansion trees.
    """
    print padding[:-1] + repr(node)
    padding = padding + ' '
    count = 0
    #_children1=reduce(lambda x,y:x+y,list(fetchChildren(node)))
    for child in node.children:#_children1:
        count += 1
        print padding + '|'
        if child.children:
            if count == len(node.children):
                print_tree(child, padding + ' ')
            else:
                print_tree(child, padding + '|')
        else:
            # Leaf: show it together with its non-empty bindings.
            print padding + '+-' + repr(child) + ' ' + \
                  repr(dict([(k,v) for k,v in child.bindings.items() if v]))
            optCount=0
            for optTree in child.optionalTrees:
                optCount += 1
                print padding + '||'
                if optTree.children:
                    if optCount == len(child.optionalTrees):
                        print_tree(optTree, padding + ' ')
                    else:
                        print_tree(optTree, padding + '||')
                else:
                    print padding + '+=' + repr(optTree)
    count = 0
    for optTree in node.optionalTrees:
        count += 1
        print padding + '||'
        if optTree.children:
            if count == len(node.optionalTrees):
                print_tree(optTree, padding + ' ')
            else:
                print_tree(optTree, padding + '||')
        else:
            print padding + '+=' + repr(optTree)

def _ExpandJoin(node,expression,tripleStore,prolog,optionalTree=False):
    """
    Traverses to the leaves of expansion trees to implement the Join
    operator
    """
    if prolog.DEBUG:
        print_tree(node)
        print "-------------------"
    #for node in BF_leaf_traversal(node):
    currExpr = expression
    for node in walktree(node):
        if node.clash:
            continue
        assert len(node.children) == 0
        if prolog.DEBUG:
            print "Performing Join(%s,..)"%node
        if isinstance(currExpr,AlgebraExpression):
            #If an algebra expression evaluate it passing on the leaf bindings
            if prolog.DEBUG:
                print "passing on bindings to %s\n:%s"%(currExpr,node.bindings.copy())
            expression = currExpr.evaluate(tripleStore,node.bindings.copy(),prolog)
        else:
            expression = currExpr
        if isinstance(expression,BasicGraphPattern):
            tS = tripleStore
            if hasattr(expression,'tripleStore'):
                # A GRAPH expression prepared its own active graph; use it.
                if prolog.DEBUG:
                    print "has tripleStore: ",expression.tripleStore
                tS = expression.tripleStore
            if prolog.DEBUG:
                print "Evaluated left node and traversed to leaf, expanding with ", expression
                print node.tripleStore.graph
                print "expressions bindings: ", Query._createInitialBindings(expression)
                print "node bindings: ", node.bindings
            exprBindings = Query._createInitialBindings(expression)
            exprBindings.update(node.bindings)
            #An indicator for whether this node has any descendant optional expansions
            #we should consider instead
            #in Join(LeftJoin(A,B),X), if the inner LeftJoin is successful,
            #then X is joined
            #against the cumulative bindings ( instead of just A )
            descendantOptionals = node.optionalTrees and \
                [o for o in node.optionalTrees if list(Query._fetchBoundLeaves(o))]
            if not descendantOptionals:
                top = node
            else:
                if prolog.DEBUG:
                    print "descendant optionals: ", descendantOptionals
                top = None
            child = None
            if not node.clash and not descendantOptionals:
                #It has compatible bindings and either no optional expansions
                #or no *valid* optional expansions
                child = Query._SPARQLNode(top,
                                          exprBindings,
                                          expression.patterns,
                                          tS,
                                          expr=node.expr)
                child.expand(expression.constraints)
                if prolog.DEBUG:
                    print "Has compatible bindings and no valid optional expansions"
                    print "Newly bound descendants: "
                    for c in Query._fetchBoundLeaves(child):
                        print "\t",c, c.bound
                        print c.bindings
        else:
            assert isinstance(expression,Query.Query)
            if not expression.top:
                #already evaluated a UNION - fetch UNION branches
                child = list(fetchUnionBranchesRoots(expression))
            else:
                #Already been evaluated (non UNION), just attach
# (continuation of _ExpandJoin: attach the already-evaluated node)
                # the SPARQLNode
                child = expression.top
        if isinstance(child,Query._SPARQLNode):
            if node.clash == False and child is not None:
                node.children.append(child)
            if prolog.DEBUG:
                print "Adding %s to %s (a UNION branch)"%(child,node)
        else:
            # A list of UNION branch roots: attach each one.
            assert isinstance(child,list)
            for newChild in child:
#                if not newChild.clash:
                node.children.append(newChild)
                if prolog.DEBUG:
                    print "Adding %s to %s"%(child,node)
        if prolog.DEBUG:
            print_tree(node)
            print "-------------------"
        for optTree in node.optionalTrees:
            #Join the optional paths as well - those that are bound and valid
            for validLeaf in Query._fetchBoundLeaves(optTree):
                _ExpandJoin(validLeaf,
                            expression,
                            tripleStore,
                            prolog,
                            optionalTree=True)

class Join(AlgebraExpression):
    """
    [[(P1 AND P2)]](D,G) = [[P1]](D,G) compat [[P2]](D,G)

    Join(Ω1, Ω2) = { merge(μ1, μ2) | μ1 in Ω1 and μ2 in Ω2, and μ1 and μ2 are \
    compatible }

    Pseudocode implementation:
    Evaluate BGP1
    Traverse to leaves (expand and expandOption leaves) of BGP1, set 'rest' to
    triple patterns in BGP2 (filling out bindings).
    Trigger another round of expand / expandOptions (from the leaves)
    """
    def __init__(self,BGP1,BGP2):
        self.left  = BGP1
        self.right = BGP2

    def evaluate(self,tripleStore,initialBindings,prolog):
        # Evaluate the left operand first, then join the right operand
        # against every bound leaf of the resulting expansion tree.
        if prolog.DEBUG:
            print "eval(%s,%s,%s)"%(self,initialBindings,tripleStore.graph)
        if isinstance(self.left,AlgebraExpression):
            left = self.left.evaluate(tripleStore,initialBindings,prolog)
        else:
            left = self.left
        if isinstance(left,BasicGraphPattern):
            retval = None
            bindings = Query._createInitialBindings(left)
            if initialBindings:
                bindings.update(initialBindings)
            if hasattr(left,'tripleStore'):
                #Use the prepared tripleStore
                lTS = left.tripleStore
            else:
                lTS = tripleStore
            top = Query._SPARQLNode(None,
                                    bindings,
                                    left.patterns,
                                    lTS,
                                    expr=left)
            top.expand(left.constraints)
            _ExpandJoin(top,self.right,tripleStore,prolog)
            return Query.Query(top, tripleStore)
        else:
            assert isinstance(left,Query.Query), repr(left)
            if left.parent1 and left.parent2:
                #union branch.
                # We need to unroll all operands (recursively)
                for union_root in fetchUnionBranchesRoots(left):
                    _ExpandJoin(union_root,self.right,tripleStore,prolog)
            else:
                for b in Query._fetchBoundLeaves(left.top):
                    _ExpandJoin(b,self.right,tripleStore,prolog)
            return left

def _ExpandLeftJoin(node,expression,tripleStore,prolog,optionalTree=False):
    """
    Traverses to the leaves of expansion trees to implement the LeftJoin
    operator
    """
    currExpr = expression
    if prolog.DEBUG:
        print "DFS and LeftJoin expansion of "
        print_tree(node)
        print "---------------------"
        print node.bindings
    for node in walktree(node,optProxies=True):
        if node.clash:
            continue
        assert len(node.children) == 0 # this is a leaf in the original expansion
        if prolog.DEBUG:
            print "Performing LeftJoin(%s,..)"%node
        if isinstance(currExpr,AlgebraExpression):
            #If a Graph pattern evaluate it passing on the leaf bindings
            #(possibly as solutions to graph names
            if prolog.DEBUG:
                print "evaluating B in LeftJoin(A,B)"
                print "passing on bindings to %s\n:%s"%(currExpr,
                                                        node.bindings.copy())
            expression = currExpr.evaluate(tripleStore,node.bindings.copy(),
                                           prolog)
        else:
            expression = currExpr
        if isinstance(expression,BasicGraphPattern):
            rightBindings = Query._createInitialBindings(expression)
            rightBindings.update(node.bindings)
            optTree = Query._SPARQLNode(None,
                                        rightBindings,
                                        expression.patterns,
                                        tripleStore,
                                        expr=expression)
            if prolog.DEBUG:
                print "evaluating B in LeftJoin(A,B) - a BGP: ", expression
                print "Passing on bindings ",rightBindings
            optTree.expand(expression.constraints)
            for proxy in Query._fetchBoundLeaves(optTree):
                #Mark a successful evaluation of LeftJoin (new bindings were added)
                #these become proxies for later expressions
                proxy.priorLeftJoin=True
        else:
            if prolog.DEBUG:
                print "Attaching previously evaluated node: ", expression.top
            assert isinstance(expression,Query.Query)
            if not expression.top:
                #already evaluated a UNION - fetch UNION branches
                optTree = list(fetchUnionBranchesRoots(expression))
            else:
                #Already been evaluated (non
# (continuation of _ExpandLeftJoin: attach the already-evaluated node)
                # UNION), just attach the SPARQLNode
                optTree = expression.top
        if prolog.DEBUG:
            print "Optional tree: ", optTree
        if isinstance(optTree,Query._SPARQLNode):
            # NOTE(review): the 'is not None' guard is evaluated *after*
            # 'optTree.clash' is accessed -- it can never prevent an
            # AttributeError on None; the order should be swapped.
            if optTree.clash == False and optTree is not None:
                node.optionalTrees.append(optTree)
            if prolog.DEBUG:
                print "Adding %s to %s (a UNION branch)"%(optTree,
                                                          node.optionalTrees)
        else:
            # A list of UNION branch roots: attach each as an optional tree.
            assert isinstance(optTree,list)
            for newChild in optTree:
#                if not newChild.clash:
                node.optionalTrees.append(newChild)
                if prolog.DEBUG:
                    print "Adding %s to %s"%(newChild,node.optionalTrees)
    if prolog.DEBUG:
        print "DFS after LeftJoin expansion "
        print_tree(node)
        print "---------------------"

class LeftJoin(AlgebraExpression):
    """
    Let Ω1 and Ω2 be multisets of solution mappings and F a filter. We
    define:

    LeftJoin(Ω1, Ω2, expr) =
        Filter(expr, Join(Ω1, Ω2)) set-union Diff(Ω1, Ω2, expr)

    LeftJoin(Ω1, Ω2, expr) =
    { merge(μ1, μ2) | μ1 in Ω1 and μ2 in Ω2, and μ1 and μ2 are compatible,
      and expr(merge(μ1, μ2)) is true }
    set-union
    { μ1 | μ1 in Ω1 and μ2 in Ω2, and μ1 and μ2 are not compatible }
    set-union
    { μ1 | μ1 in Ω1and μ2 in Ω2, and μ1 and μ2 are compatible and
      expr(merge(μ1, μ2)) is false }
    """
    def __init__(self,BGP1,BGP2,expr=None):
        # NOTE(review): 'expr' (the LeftJoin filter) is accepted but never
        # stored -- presumably filters ride along inside BGP2; confirm.
        self.left  = BGP1
        self.right = BGP2

    def evaluate(self,tripleStore,initialBindings,prolog):
        # Evaluate the left operand, then expand the right operand as an
        # OPTIONAL tree under every bound leaf.
        if prolog.DEBUG:
            print "eval(%s,%s,%s)"%(self,initialBindings,tripleStore.graph)
        if isinstance(self.left,AlgebraExpression):
            #print "evaluating A in LeftJoin(A,B) - an expression"
            left = self.left.evaluate(tripleStore,initialBindings,prolog)
        else:
            left = self.left
        if isinstance(left,BasicGraphPattern):
            #print "expanding A in LeftJoin(A,B) - a BGP: ", left
            retval = None
            bindings = Query._createInitialBindings(left)
            if initialBindings:
                bindings.update(initialBindings)
            if hasattr(left,'tripleStore'):
                #Use the prepared tripleStore
                tripleStore = left.tripleStore
            top = Query._SPARQLNode(None,
                                    bindings,
                                    left.patterns,
                                    tripleStore,
                                    expr=left)
            top.expand(left.constraints)
            for b in Query._fetchBoundLeaves(top):
                _ExpandLeftJoin(b,self.right,tripleStore,prolog)
            #_ExpandLeftJoin(top,self.right,tripleStore,prolog)
            return Query.Query(top, tripleStore)
        else:
            assert isinstance(left,Query.Query), repr(left)
            if left.parent1 and left.parent2:
                for union_root in fetchUnionBranchesRoots(left):
                    _ExpandLeftJoin(union_root,self.right,tripleStore,prolog)
            else:
                for b in Query._fetchBoundLeaves(left.top):
                    _ExpandLeftJoin(b,self.right,tripleStore,prolog)
            #_ExpandLeftJoin(left.top,self.right,tripleStore,prolog)
            return left

class Union(AlgebraExpression):
    """
    II. [[(P1 UNION P2)]](D,G) = [[P1]](D,G) OR [[P2]](D,G)

    Union(Ω1, Ω2) = { μ | μ in Ω1 or μ in Ω2 }
    """
    def __init__(self,BGP1,BGP2):
        self.left  = BGP1
        self.right = BGP2

    def evaluate(self,tripleStore,initialBindings,prolog):
        # Evaluate both operands independently; the union itself is
        # realised by Query.__add__ (see return at the end).
        if prolog.DEBUG:
            print "eval(%s,%s,%s)"%(self,initialBindings,tripleStore.graph)
        if isinstance(self.left,AlgebraExpression):
            left = self.left.evaluate(tripleStore,initialBindings,prolog)
        else:
            left = self.left
        if isinstance(left,BasicGraphPattern):
            #The left expression has not been evaluated
            retval = None
            bindings = Query._createInitialBindings(left)
            if initialBindings:
                bindings.update(initialBindings)
            top = Query._SPARQLNode(None,
                                    bindings,
                                    left.patterns,
                                    tripleStore,
                                    expr=left)
            top.expand(left.constraints)
            top = Query.Query(top, tripleStore)
        else:
            #The left expression has already been evaluated
            assert isinstance(left,Query.Query), repr(left)
            top = left
        #Now we evaluate the right expression (independently)
        if isinstance(self.right,AlgebraExpression):
            #If it is a GraphExpression, 'reduce' it
            right = self.right.evaluate(tripleStore,initialBindings,prolog)
        else:
            right = self.right
        tS = tripleStore
        if isinstance(right,BasicGraphPattern):
            if hasattr(right,'tripleStore'):
                tS = right.tripleStore
            rightBindings = Query._createInitialBindings(right)
            if initialBindings:
                rightBindings.update(initialBindings)
            rightNode = Query._SPARQLNode(None,
                                          rightBindings,
                                          right.patterns,
                                          tS,
                                          expr=right)
            rightNode.expand(right.constraints)
        else:
# (continuation of Union.evaluate: right operand was already a Query)
            assert isinstance(right,Query.Query), repr(right)
            rightNode = right.top
#        if prolog.DEBUG:
#            print "### Two UNION trees ###"
#            print self.left
#            print_tree(top.top)
#            print self.right
#            print_tree(rightNode)
#            print "#######################"
        #The UNION semantics are implemented by the overidden __add__ method
        return top + Query.Query(rightNode, tS)

class GraphExpression(AlgebraExpression):
    """
    [24] GraphGraphPattern ::= 'GRAPH' VarOrIRIref GroupGraphPattern
    eval(D(G), Graph(IRI,P)) = eval(D(D[i]), P)
    eval(D(G), Graph(var,P)) =
        multiset-union over IRI i in D : Join( eval(D(D[i]), P) , Omega(?v->i) )
    """
    def __init__(self,iriOrVar,GGP):
        self.iriOrVar = iriOrVar
        self.GGP = GGP

    def __repr__(self):
        return "Graph(%s,%s)"%(self.iriOrVar,self.GGP)

    def evaluate(self,tripleStore,initialBindings,prolog):
        """
        .. The GRAPH keyword is used to make the active graph one of all of
        the named graphs in the dataset for part of the query ...
        """
        if prolog.DEBUG:
            print "eval(%s,%s,%s)"%(self,initialBindings,tripleStore.graph)
        if isinstance(self.iriOrVar,Variable):
            #A variable:
            if self.iriOrVar in initialBindings:
                #assert initialBindings[self.iriOrVar], "Empty binding for GRAPH variable!"
                # The graph variable is already bound: make that named
                # graph the active graph.
                if prolog.DEBUG:
                    print "Passing on unified graph name: ", initialBindings[self.iriOrVar]
                tripleStore = sparqlGraph.SPARQLGraph(
                                  Graph(tripleStore.store,
                                        initialBindings[self.iriOrVar]),
                                  dSCompliance=DAWG_DATASET_COMPLIANCE)
            else:
                # Unbound graph variable: the BGP will produce bindings
                # for it as it matches against the named graphs.
                if prolog.DEBUG:
                    print "Setting up BGP to return additional bindings for %s"%self.iriOrVar
                tripleStore = sparqlGraph.SPARQLGraph(tripleStore.graph,
                                                      graphVariable = self.iriOrVar,
                                                      dSCompliance=DAWG_DATASET_COMPLIANCE)
        else:
            # A concrete IRI (or prefixed name): resolve it and make the
            # corresponding named graph the active graph.
            graphName = self.iriOrVar
            graphName = convertTerm(graphName,prolog)
            if isinstance(tripleStore.graph,ReadOnlyGraphAggregate):
                targetGraph = [g for g in tripleStore.graph.graphs
                                   if g.identifier == graphName]
                #assert len(targetGraph) == 1
                targetGraph = targetGraph[0]
            else:
                targetGraph = Graph(tripleStore.store,graphName)
            tripleStore = sparqlGraph.SPARQLGraph(targetGraph,
                                                  dSCompliance=\
                                                  DAWG_DATASET_COMPLIANCE)
        if isinstance(self.GGP,AlgebraExpression):
            #Dont evaluate
            return self.GGP.evaluate(tripleStore,initialBindings,prolog)
        else:
            assert isinstance(self.GGP,BasicGraphPattern),repr(self.GGP)
            #Attach the prepared triple store to the BGP
            self.GGP.tripleStore = tripleStore
            return self.GGP

#########################################
#               Tests                   #
#########################################

# NOTE(review): in all the fixture strings below the content of every
# '<...>' token (e.g. 'BASE <http://example.com/>') was stripped by the
# extraction that produced this file; the fixtures must be restored from
# the original distribution before these tests can run.
TEST1="BASE SELECT * WHERE { ?s :p1 ?v1 ; :p2 ?v2 }"
#BGP( ?s :p1 ?v1 .?s :p2 ?v2 )
TEST1_REPR=\
"BGP((?s,http://example.com/p1,?v1),(?s,http://example.com/p2,?v2))"

TEST2 = "BASE SELECT * WHERE { { ?s :p1 ?v1 } UNION {?s :p2 ?v2 } }"
#Union( BGP(?s :p1 ?v1) , BGP(?s :p2 ?v2) )
TEST2_REPR=\
"Union(BGP((?s,http://example.com/p1,?v1)),BGP((?s,http://example.com/p2,?v2)))"

TEST3 = "BASE SELECT * WHERE { ?s :p1 ?v1 OPTIONAL {?s :p2 ?v2 } }"
#LeftJoin(BGP(?s :p1 ?v1), BGP(?s :p2 ?v2), true)
TEST3_REPR=\
"LeftJoin(BGP((?s,http://example.com/p1,?v1)),BGP((?s,http://example.com/p2,?v2)))"

TEST4 = "BASE SELECT * WHERE { ?s :p ?o.
{ ?s :p1 ?v1 } UNION {?s :p2 ?v2 } }" #Join(BGP(?s :p ?v),Union(BGP(?s :p1 ?v1), BGP(?s :p2 ?v2))) TEST4_REPR=\ "Join(BGP((?s,http://example.com/p,?o)),Union(BGP((?s,http://example.com/p1,?v1)),BGP((?s,http://example.com/p2,?v2))))" TEST5 = "BASE SELECT * WHERE { ?a ?b ?c OPTIONAL { ?s :p1 ?v1 } }" #Join(BGP(?s :p ?v),Union(BGP(?s :p1 ?v1), BGP(?s :p2 ?v2))) TEST5_REPR=\ "LeftJoin(BGP((?a,?b,?c)),BGP((?s,http://example.com/p1,?v1)))" TEST6="BASE SELECT * WHERE { ?a :b :c OPTIONAL {:x :y :z} { :x1 :y1 :z1 } UNION { :x2 :y2 :z2 } }" TEST6_REPR=\ "Join(LeftJoin(BGP((?a,http://example.com/b,http://example.com/c)),BGP((http://example.com/x,http://example.com/y,http://example.com/z))),Union(BGP((http://example.com/x1,http://example.com/y1,http://example.com/z1)),BGP((http://example.com/x2,http://example.com/y2,http://example.com/z2))))" TEST7="BASE SELECT * WHERE { ?s :p1 ?v1 OPTIONAL { ?s :p2 ?v2. FILTER( ?v1 < 3 ) } }" TEST7_REPR=\ "LeftJoin(BGP((?s,http://example.com/p1,?v1)),Filter(.. a filter ..,BGP(?s,http://example.com/p2,?v2)))" TEST8="BASE SELECT * WHERE { ?s :p1 ?v1. FILTER ( ?v1 < 3 ) OPTIONAL { ?s :p3 ?v3 } }" TEST8_REPR=\ "LeftJoin(Filter(.. a filter ..,BGP(?s,http://example.com/p1,?v1)),BGP((?s,http://example.com/p3,?v3)))" TEST10=\ """ PREFIX data: PREFIX foaf: PREFIX rdfs: SELECT ?mbox ?nick ?ppd FROM NAMED FROM NAMED WHERE { GRAPH data:aliceFoaf { ?alice foaf:mbox ; foaf:knows ?whom . ?whom foaf:mbox ?mbox ; rdfs:seeAlso ?ppd . ?ppd a foaf:PersonalProfileDocument . } . GRAPH ?ppd { ?w foaf:mbox ?mbox ; foaf:nick ?nick } }""" reducableSPARQL=\ """ PREFIX mf: PREFIX qt: PREFIX rdfs: SELECT ?test ?testName ?testComment ?query ?result ?testAction WHERE { { ?test a mf:QueryEvaluationTest } UNION { ?test a } ?test mf:name ?testName. OPTIONAL { ?test rdfs:comment ?testComment } ?test mf:action ?testAction; mf:result ?result. 
?testAction qt:query ?query }""" reducableSPARQLExpr=\ "Join(LeftJoin(Join(Union(BGP((?test,http://www.w3.org/1999/02/22-rdf-syntax-ns#type,mf:QueryEvaluationTest)),BGP((?test,http://www.w3.org/1999/02/22-rdf-syntax-ns#type,http://jena.hpl.hp.com/2005/05/test-manifest-extra#TestQuery))),BGP((?test,mf:name,?testName))),BGP((?test,rdfs:comment,?testComment))),BGP((?test,mf:action,?testAction),(?test,mf:result,?result),(?testAction,qt:query,?query)))" ExprTests = \ [ (TEST1,TEST1_REPR), (TEST2,TEST2_REPR), (TEST3,TEST3_REPR), (TEST4,TEST4_REPR), (TEST5,TEST5_REPR), (TEST6,TEST6_REPR), (TEST7,TEST7_REPR), (TEST8,TEST8_REPR), (reducableSPARQL,reducableSPARQLExpr), ] test_graph_a = """ @prefix foaf: . @prefix rdf: . @prefix rdfs: . _:a foaf:name "Alice" . _:a foaf:mbox . _:a foaf:knows _:b . _:b foaf:name "Bob" . _:b foaf:mbox . _:b foaf:nick "Bobby" . _:b rdfs:seeAlso . rdf:type foaf:PersonalProfileDocument .""" test_graph_b = """ @prefix foaf: . @prefix rdf: . @prefix rdfs: . _:z foaf:mbox . _:z rdfs:seeAlso . _:z foaf:nick "Robert" . rdf:type foaf:PersonalProfileDocument .""" scopingQuery=\ """ PREFIX data: PREFIX foaf: PREFIX rdfs: SELECT ?ppd FROM NAMED FROM NAMED WHERE { GRAPH ?ppd { ?b foaf:name "Bob" . } . GRAPH ?ppd { ?doc a foaf:PersonalProfileDocument . 
} }""" class TestSPARQLAlgebra(unittest.TestCase): def setUp(self): self.store = plugin.get('IOMemory', Store)() self.graph1 = Graph(self.store,identifier=URIRef('http://example.org/foaf/aliceFoaf')) self.graph1.parse(StringIO(test_graph_a), format="n3") self.graph2 = Graph(self.store,identifier=URIRef('http://example.org/foaf/bobFoaf')) self.graph2.parse(StringIO(test_graph_b), format="n3") self.unionGraph = ReadOnlyGraphAggregate(graphs=[self.graph1,self.graph2],store=self.store) # def testScoping(self): # from rdflib.sparql.bison.Processor import Parse # from rdflib.sparql.QueryResult import SPARQLQueryResult # from rdflib.sparql.bison.Query import Prolog # p = Parse(scopingQuery) # prolog = p.prolog # if prolog is None: # prolog = Prolog(u'',[]) # prolog.DEBUG = True # rt = TopEvaluate(p,self.unionGraph,passedBindings = {},DEBUG=False) # rt = SPARQLQueryResult(rt).serialize(format='python') # self.failUnless(len(rt) == 1,"Expected 1 item solution set") # for ppd in rt: # self.failUnless(ppd == URIRef('http://example.org/foaf/aliceFoaf'), # "Unexpected ?mbox binding :\n %s" % ppd) def testExpressions(self): from rdflib.sparql.bison.Processor import Parse global prolog for inExpr,outExpr in ExprTests: p = Parse(inExpr) prolog = p.prolog p = p.query.whereClause.parsedGraphPattern.graphPatterns if prolog is None: from rdflib.sparql.bison.Query import Prolog prolog = Prolog(u'',[]) if not hasattr(prolog,'DEBUG'): prolog.DEBUG = False self.assertEquals(repr(reduce(ReduceToAlgebra,p,None)),outExpr) def testSimpleGraphPattern(self): from rdflib.sparql.bison.Processor import Parse global prolog p = Parse("BASE SELECT ?ptrec WHERE { GRAPH ?ptrec { ?data :foo 'bar'. 
} }") prolog = p.prolog p = p.query.whereClause.parsedGraphPattern.graphPatterns if prolog is None: from rdflib.sparql.bison.Query import Prolog prolog = Prolog(u'',[]) prolog.DEBUG = True assert isinstance(reduce(ReduceToAlgebra,p,None),GraphExpression) # def testGraphEvaluation(self): # from rdflib.sparql.bison.Processor import Parse # p = Parse(TEST10) # print TEST10 # rt = TopEvaluate(p,self.unionGraph,passedBindings = {}) # from rdflib.sparql.QueryResult import SPARQLQueryResult # rt = SPARQLQueryResult(rt).serialize(format='python') # self.failUnless(len(rt) == 1,"Expected 1 item solution set") # for mbox,nick,ppd in rt: # self.failUnless(mbox == URIRef('mailto:bob@work.example'), # "Unexpected ?mbox binding :\n %s" % mbox) # self.failUnless(nick == Literal("Robert"), # "Unexpected ?nick binding :\n %s" % nick) # self.failUnless(ppd == URIRef('http://example.org/foaf/bobFoaf'), # "Unexpected ?ppd binding :\n %s" % ppd) if __name__ == '__main__': unittest.main() rdflib-2.4.2/rdflib/sparql/graphPattern.py0000755000175000017500000003675111153616034017517 0ustar nachonacho# -*- coding: utf-8 -*- # # # $Date: 2005/11/04 14:06:36 $, by $Author: ivan $, $Revision: 1.1 $ # """ Graph pattern class used by the SPARQL implementation """ import sys, os, time, datetime from rdflib.Literal import Literal from rdflib.BNode import BNode from rdflib.URIRef import URIRef from rdflib import Variable from types import * from rdflib.syntax.NamespaceManager import NamespaceManager from rdflib.Graph import Graph from rdflib.sparql import _questChar, Debug, SPARQLError def _createResource(v) : """Create an RDFLib Literal instance with the corresponding XML Schema datatype set. If the variable is already an RDFLib resource, it simply returns the resource; otherwise the corresponding Literal. A SPARQLError Exception is raised if the type is not implemented. 
    The Literal contains the string representation of the variable (as
    Python does it by default) with the corresponding XML Schema URI set.

    @param v: Python variable
    @return: either an RDFLib Literal (if 'v' is not an RDFLib Resource), or
    the same variable if it is already an RDFLib resource (ie, Literal,
    BNode, or URIRef)
    @raise SPARQLError: if the type of 'v' is not implemented
    """
    if isinstance(v,Literal) or isinstance(v,BNode) or isinstance(v,URIRef) :
        # just do nothing
        return v
    else :
        return Literal(v) # Literal now does the datatype bits

def _isResQuest(r) :
    """
    Is 'r' a request string (ie, of the form "?XXX")?

    @rtype: Boolean
    """
    if r and isinstance(r,basestring) and r[0] == _questChar :
        return True
    return False

class GraphPattern :
    """
    Storage of one Graph Pattern, ie, the pattern tuples and the possible
    (functional) constraints (filters)
    """
    def __init__(self,patterns=[]) :
        """
        @param patterns: an initial list of graph pattern tuples
        """
        # NOTE(review): mutable default argument -- harmless here because
        # the default list is only read, but 'patterns=None' would be the
        # safer idiom.
        self.patterns    = []
        self.constraints = []
        self.unbounds    = []
        self.bnodes      = {}
        if type(patterns) == list :
            self.addPatterns(patterns)
        elif type(patterns) == tuple :
            self.addPattern(patterns)
        else :
            raise SPARQLError("illegal argument, pattern must be a tuple or a list of tuples")

    def _generatePattern(self,tupl) :
        """
        Normalize one pattern tuple: each of (s,p,o) is recorded as an
        unbound variable ("?XXX" string), kept as-is (BNode), or converted
        to an RDFLib Literal; the optional 4th element is a per-pattern
        constraint (filter) function, stored as None when absent.
        (A per-pattern constraint stops graph expansion early, so it can
        be much cheaper than a 'global' constraint.)

        @param tupl: either a three or four element tuple
        """
        if type(tupl) != tuple :
            raise SPARQLError("illegal argument, pattern must be a tuple, got %s" % type(tupl))
        if len(tupl) != 3 and len(tupl) != 4 :
            raise SPARQLError("illegal argument, pattern must be a tuple of 3 or 4 element, got %s" % len(tupl))
        if len(tupl) == 3 :
            (s,p,o)   = tupl
            f         = None
        else :
            (s,p,o,f) = tupl
        final=[]
        for c in (s,p,o) :
            if _isResQuest(c) :
                if not c in self.unbounds :
                    self.unbounds.append(c)
                final.append(c)
            elif isinstance(c, BNode):
                #Do nothing - BNode name management is handled by SPARQL parser
#                if not c in self.bnodes :
#                    self.bnodes[c] = BNode()
                final.append(c)
            else :
                final.append(_createResource(c))
        final.append(f)
        return tuple(final)

    def addPattern(self,tupl) :
        """
        Append a single (3- or 4-element) pattern tuple; see
        L{_generatePattern} for the accepted forms.

        @param tupl: either a three or four element tuple
        """
        self.patterns.append(self._generatePattern(tupl))

    def insertPattern(self,tupl) :
        """
        Like L{addPattern}, but prepends the pattern.  Pattern order does
        not change the semantics, but patterns that 'cut' the expansion
        tree early can speed up evaluation, hence this extra method.

        @param tupl: either a three or four element tuple
        """
        self.patterns.insert(0,self._generatePattern(tupl))

    def addPatterns(self,lst) :
        """
        Append a list of pattern tuples; see L{addPattern}.

        @param lst: list consisting of either a three or four element tuples
        """
        for l in lst:
            self.addPattern(l)

    def insertPatterns(self,lst) :
        """
        Prepend a list of pattern tuples, preserving their relative order;
        see L{insertPattern}.

        @param lst: list consisting of either a three or four element tuples
        """
        for i in xrange(len(lst)-1,-1,-1) :
            self.insertPattern(lst[i])

    def addConstraint(self,func) :
        """
        Add a global filter constraint: a function mapping a binding
        dictionary to a boolean.  Constraints accumulate -- I{all} of them
        must return True for a binding to be accepted.

        @param func: filter function
        """
        if type(func) == FunctionType :
            self.constraints.append(func)
        else :
            raise SPARQLError("illegal argument, constraint must be a function type, got %s" % type(func))

    def addConstraints(self,lst) :
        """
        Add a list of global filter constraints; see L{addConstraint}.

        @param lst: list of functions
        """
        for l in lst:
            self.addConstraint(l)

    def construct(self,tripleStore,bindings) :
        """
        Add triples to a tripleStore based on a variable bindings of the
        patterns stored locally.  The triples are patterned by the current
        Graph Pattern.  The method is used to construct a graph after a
        successful querying.

        @param tripleStore: an (rdflib) Triple Store
        @param bindings: dictionary
        """
        localBnodes = {}
        for c in self.bnodes :
            localBnodes[c] = BNode()
        def bind(st) :
            # Resolve one pattern position against the bindings; returns
            # None for an unresolvable position (the triple is skipped).
            if _isResQuest(st) :
                if st in bindings :
                    return bindings[st]
                else :
                    # NOTE(review): GraphPattern2 is not defined above this
                    # point -- presumably later in this module; confirm.
                    if isinstance(self,GraphPattern2) :
                        return st
                    else :
                        return None
            elif isinstance(st,BNode) :
                for c in self.bnodes :
                    if self.bnodes[c] == st :
                        # this is a BNode that was created as part of building up the pattern
                        return localBnodes[c]
                # if we got here, the BNode comes from somewhere else...
                return st
            else :
                return st
        for pattern in self.patterns :
            (s,p,o,f) = pattern
            triplet = []
            valid   = True
            for res in (s,p,o) :
                val = bind(res)
                if val != None :
                    triplet.append(val)
                else :
                    valid = False
                    break
            if valid :
                tripleStore.add(tuple(triplet))

    def __add__(self,other) :
        """Adding means concatenating all the patterns and filters arrays"""
        retval  = GraphPattern()
        retval += self
        retval += other
        return retval

    def __iadd__(self,other) :
        """Adding means concatenating all the patterns and filters arrays"""
        self.patterns    += other.patterns
        self.constraints += other.constraints
        for c in other.unbounds :
            if not c in self.unbounds :
                self.unbounds.append(c)
        for c in other.bnodes :
            if not c in self.bnodes :
                self.bnodes[c] = other.bnodes[c]
        return self

    def __str__(self) :
        return self.__repr__()

    def isEmpty(self) :
        """Is the pattern empty?

        @rtype: Boolean
        """
        return len(self.patterns) == 0

class BasicGraphPattern(GraphPattern) :
    """One, justified, problem with the current definition of
    L{GraphPattern} is that it makes it difficult for users to use a
    literal of the type "?XXX", because any string beginning with "?" will
    be considered to be an unbound variable.  The only way of doing this
    is that the user explicitly creates a Literal object and uses that as
    part of the pattern.
This class is a superclass of L{GraphPattern} which does I{not} do this, but requires the usage of a separate variable class instance""" def __init__(self,patterns=[],prolog=None) : """ @param patterns: an initial list of graph pattern tuples """ GraphPattern.__init__(self,patterns) self.prolog = prolog def canonicalTerm(self,term): if isinstance(term,URIRef): if self.prolog is not None: namespace_manager = NamespaceManager(Graph()) for prefix,uri in self.prolog.prefixBindings.items(): namespace_manager.bind(prefix, uri, override=False) try: prefix,uri,localName=namespace_manager.compute_qname(term) except: return term if prefix not in self.prolog.prefixBindings: return term else: return u':'.join([prefix,localName]) else: return term elif isinstance(term,Literal): return term.n3() elif isinstance(term,BNode): return term.n3() else: assert isinstance(term,Variable) return term.n3() def __repr__(self): # from pprint import pformat if self.constraints: #return "Filter(.. a filter ..,BGP(%s))"%(','.join([pformat(p[:3]) for p in self.patterns])) return "Filter(.. a filter ..,BGP(%s))"%(','.join([','.join([self.canonicalTerm(pat[0]), self.canonicalTerm(pat[1]), self.canonicalTerm(pat[2])]) for pat in self.patterns])) else: #return "BGP(%s)"%(','.join([repr(p[:3]) for p in self.patterns])) return "BGP(%s)"%(','.join(['('+','.join([self.canonicalTerm(s), self.canonicalTerm(p), self.canonicalTerm(o)])+')' for s,p,o,f in self.patterns])) retval = " Patterns: %s\n" % self.patterns retval += " Constraints: %s\n" % self.constraints retval += " Unbounds: %s\n" % self.unbounds return retval def _generatePattern(self,tupl) : """ Append a tuple to the local patterns. Possible type literals are converted to real literals on the fly. Each tuple should be contain either 3 elements (for an RDF Triplet pattern) or four, where the fourth element is a per-pattern constraint (filter). 
(The general constraint of SPARQL can be optimized by assigning a constraint to a specific pattern; because it stops the graph expansion, its usage might be much more optimal than the the 'global' constraint). @param tupl: either a three or four element tuple """ if type(tupl) != tuple : raise SPARQLError("illegal argument, pattern must be a tuple, got %s" % type(tupl)) if len(tupl) != 3 and len(tupl) != 4 : raise SPARQLError("illegal argument, pattern must be a tuple of 3 or 4 element, got %s" % len(tupl)) if len(tupl) == 3 : (s,p,o) = tupl f = None else : (s,p,o,f) = tupl final=[] for c in (s,p,o) : if isinstance(c,Variable) : if not c in self.unbounds : self.unbounds.append(c) final.append(c) elif isinstance(c, BNode): #Do nothing - BNode name management is handled by SPARQL parser final.append(c) else : final.append(_createResource(c)) final.append(f) return tuple(final) if __name__ == '__main__' : v1 = Variable("a") g = BasicGraphPattern([("a","?b",24),("?r","?c",12345),(v1,"?c",3333)]) print g rdflib-2.4.2/rdflib/sparql/QueryResult.py0000644000175000017500000004065711153616034017361 0ustar nachonachofrom rdflib import QueryResult,URIRef,BNode,Literal, Namespace from rdflib.Graph import Graph from xml.dom import XML_NAMESPACE from xml.sax.saxutils import XMLGenerator from xml.sax.xmlreader import AttributesNSImpl from cStringIO import StringIO SPARQL_XML_NAMESPACE = u'http://www.w3.org/2005/sparql-results#' try: from Ft.Xml import MarkupWriter class SPARQLXMLWriter: """ 4Suite-based SPARQL XML Writer """ def __init__(self,output): self.writer = MarkupWriter(output, indent=u"yes") self.writer.startDocument() self.writer.startElement(u'sparql',namespace=SPARQL_XML_NAMESPACE) self.askResult=False def write_header(self,allvarsL): self.writer.startElement(u'head', namespace=SPARQL_XML_NAMESPACE) if allvarsL: for i in xrange(0,len(allvarsL)) : self.writer.startElement(u'variable',namespace=SPARQL_XML_NAMESPACE,attributes={u'name':unicode(allvarsL[i])}) 
self.writer.endElement(u'variable') self.writer.endElement( u'head') def write_results_header(self,orderBy,distinct): self.writer.startElement(u'results',namespace=SPARQL_XML_NAMESPACE,attributes={u'ordered' : unicode(orderBy and 'true' or 'false'), u'distinct': unicode(distinct and 'true' or 'false')}) def write_start_result(self): self.writer.startElement(u'result',namespace=SPARQL_XML_NAMESPACE) self._resultStarted = True def write_end_result(self): assert self._resultStarted self.writer.endElement(u'result',namespace=SPARQL_XML_NAMESPACE) self._resultStarted = False def write_ask(self,val): self.writer.startElement(u'boolean', namespace=SPARQL_XML_NAMESPACE) self.writer.text((val and u't' or u'f')+unicode(val)[1:]) self.writer.endElement(u'boolean') self.askResult=True def write_binding(self,name,val): assert self._resultStarted if val: attrs = {u'name':unicode(name)} self.writer.startElement(u'binding', namespace=SPARQL_XML_NAMESPACE, attributes=attrs) if isinstance(val,URIRef) : self.writer.startElement(u'uri', namespace=SPARQL_XML_NAMESPACE) self.writer.text(val) self.writer.endElement(u'uri') elif isinstance(val,BNode) : self.writer.startElement(u'bnode', namespace=SPARQL_XML_NAMESPACE) self.writer.text(val) self.writer.endElement(u'bnode') elif isinstance(val,Literal) : attrs = {} if val.language : attrs[(u'lang', XML_NAMESPACE)] = unicode(val.language) elif val.datatype: attrs[u'datatype'] = unicode(val.datatype) self.writer.startElement(u'literal', namespace=SPARQL_XML_NAMESPACE, attributes=attrs) self.writer.text(val) self.writer.endElement(u'literal') else: raise Exception("Unsupported RDF term: %s"%val) self.writer.endElement(u'binding') def close(self): if not self.askResult: self.writer.endElement(u'results') self.writer.endElement(u'sparql') except: class SPARQLXMLWriter: """ Python saxutils-based SPARQL XML Writer """ def __init__(self, output, encoding='utf-8'): writer = XMLGenerator(output, encoding) writer.startDocument() 
writer.startPrefixMapping(u'sparql',SPARQL_XML_NAMESPACE) writer.startPrefixMapping(u'xml', XML_NAMESPACE) writer.startElementNS((SPARQL_XML_NAMESPACE, u'sparql'), u'sparql', AttributesNSImpl({}, {})) self.writer = writer self._output = output self._encoding = encoding self.askResult=False def write_header(self,allvarsL): self.writer.startElementNS((SPARQL_XML_NAMESPACE, u'head'), u'head', AttributesNSImpl({}, {})) for i in xrange(0,len(allvarsL)) : attr_vals = { (None, u'name'): unicode(allvarsL[i]), } attr_qnames = { (None, u'name'): u'name', } self.writer.startElementNS((SPARQL_XML_NAMESPACE, u'variable'), u'variable', AttributesNSImpl(attr_vals, attr_qnames)) self.writer.endElementNS((SPARQL_XML_NAMESPACE, u'variable'), u'variable') self.writer.endElementNS((SPARQL_XML_NAMESPACE, u'head'), u'head') def write_ask(self,val): raise def write_results_header(self,orderBy,distinct): attr_vals = { (None, u'ordered') : unicode(orderBy and 'true' or 'false'), (None, u'distinct') : unicode(distinct and 'true' or 'false'), } attr_qnames = { (None, u'ordered') : u'ordered', (None, u'distinct') : u'distinct' } self.writer.startElementNS((SPARQL_XML_NAMESPACE, u'results'), u'results', AttributesNSImpl(attr_vals, attr_qnames)) def write_start_result(self): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, u'result'), u'result', AttributesNSImpl({}, {})) self._resultStarted = True def write_end_result(self): assert self._resultStarted self.writer.endElementNS((SPARQL_XML_NAMESPACE, u'result'), u'result') self._resultStarted = False def write_binding(self,name,val): assert self._resultStarted if val: attr_vals = { (None, u'name') : unicode(name), } attr_qnames = { (None, u'name') : u'name', } self.writer.startElementNS((SPARQL_XML_NAMESPACE, u'binding'), u'binding', AttributesNSImpl(attr_vals, attr_qnames)) if isinstance(val,URIRef) : self.writer.startElementNS((SPARQL_XML_NAMESPACE, u'uri'), u'uri', AttributesNSImpl({}, {})) self.writer.characters(val) 
def retToJSON(val):
    """
    Serialize one RDF term as the inner part of a SPARQL JSON result binding
    (the '"type": ..., "value": ...' fragment, without the enclosing braces).

    Fixes over the original:
      - removed an unreachable 'attr += ...' line after a return (it also
        referenced an undefined name and would have been a NameError);
      - a Literal whose language is None no longer takes the xml:lang branch
        ('val.language != ""' was true for None);
      - the type tag for datatyped literals is "typed-literal" per the W3C
        'Serializing SPARQL Query Results in JSON' note (was "typed=literal").

    @param val: an RDFLib URIRef, BNode or Literal (anything else is emitted
        as a plain literal)
    @return: JSON fragment string
    """
    if isinstance(val,URIRef):
        return '"type": "uri", "value" : "%s"' % val
    elif isinstance(val,BNode) :
        return '"type": "bnode", "value" : "%s"' % val
    elif isinstance(val,Literal):
        if val.language:
            return '"type": "literal", "xml:lang" : "%s", "value" : "%s"' % (val.language, val)
        elif val.datatype != "" and val.datatype != None:
            return '"type": "typed-literal", "datatype" : "%s", "value" : "%s"' % (val.datatype, val)
        else:
            return '"type": "literal", "value" : "%s"' % val
    else:
        # non-RDFLib value: degrade gracefully to a plain literal
        return '"type": "literal", "value" : "%s"' % val

def bindingJSON(name, val):
    """
    Return the ' "name" : {<term>' JSON fragment for one variable binding.
    An unbound variable (val is None) yields the empty string.  Note that the
    closing brace is deliberately NOT appended here -- the caller joins the
    fragments with '},\\n'.

    @param name: variable name
    @param val: bound RDF term, or None
    @return: JSON fragment string
    """
    if val == None:
        return ""
    retval = ''
    retval += ' "%s" : {' % name
    retval += retToJSON(val)
    return retval
result format: http://www.w3.org/TR/rdf-sparql-XMLres/ python: as Python objects json : as JSON graph : as an RDFLib Graph - for CONSTRUCT and DESCRIBE queries """ def __init__(self,qResult): """ The constructor is the result straight from sparql-p, which is uple of 1) a list of tuples (in select order, each item is the valid binding for the corresponding variable or 'None') for SELECTs , a SPARQLGraph for DESCRIBE/CONSTRUCT, and boolean for ASK 2) the variables selected 3) *all* the variables in the Graph Patterns 4) the order clause 5) the DISTINCT clause """ self.construct=False if isinstance(qResult,bool): self.askAnswer = [qResult] result=None selectionF=None allVars=None orderBy=None distinct=None topUnion = None elif isinstance(qResult,Graph): self.askAnswer = [] result=qResult self.construct=True selectionF=None allVars=None orderBy=None distinct=None topUnion = None else: self.askAnswer = [] result,selectionF,allVars,orderBy,distinct,topUnion = qResult self.result = result self.topUnion = topUnion self.selected = result self.selectionF = selectionF self.allVariables = allVars self.orderBy = orderBy self.distinct = distinct def __len__(self): if isinstance(self.selected,list): return len(self.selected) else: return 1 def __iter__(self): """Iterates over the result entries""" if isinstance(self.selected,list): for item in self.selected: if isinstance(item,basestring): yield (item,) else: yield item else: yield self.selected def serialize(self,format='xml'): if isinstance(self.result,Graph): return self.result.serialize(format=format) elif format == 'python': return self.selected elif format in ['json','xml']: retval = "" allvarsL = self.allVariables if format == "json" : retval += ' "results" : {\n' retval += ' "ordered" : %s,\n' % (self.orderBy and 'true' or 'false') retval += ' "distinct" : %s,\n' % (self.distinct and 'true' or 'false') retval += ' "bindings" : [\n' for i in xrange(0,len(self.selected)): hit = self.selected[i] retval += ' {\n' bindings = 
[] if len(self.selectionF) == 0: for j in xrange(0, len(allvarsL)): b = bindingJSON(allvarsL[j],hit[j]) if b != "": bindings.append(b) elif len(self.selectionF) == 1: bindings.append(bindingJSON(self.selectionF[0],hit)) else: for j in xrange(0, len(self.selectionF)): b = bindingJSON(self.selectionF[j],hit[j]) if b != "": bindings.append(b) retval += "},\n".join(bindings) retval += "}\n" retval += ' }' if i != len(self.selected) -1: retval += ',\n' else: retval += '\n' retval += ' ]\n' retval += ' }\n' retval += '}\n' selected_vars = self.selectionF if len(selected_vars) == 0: selected_vars = allvarsL header = "" header += '{\n' header += ' "head" : {\n "vars" : [\n' for i in xrange(0,len(selected_vars)) : header += ' "%s"' % selected_vars[i] if i == len(selected_vars) - 1 : header += '\n' else : header += ',\n' header += ' ]\n' header += ' },\n' retval = header + retval elif format == "xml" : # xml output out = StringIO() writer = SPARQLXMLWriter(out) if self.askAnswer: writer.write_header(allvarsL) writer.write_ask(self.askAnswer[0]) else: writer.write_header(allvarsL) writer.write_results_header(self.orderBy,self.distinct) if self.topUnion: for binding in self.topUnion: writer.write_start_result() if isinstance(binding,dict): for key,val in binding.items(): if not self.selectionF or \ key in self.selectionF: writer.write_binding(key,val) else: print self.selectionF,binding raise writer.write_end_result() else: for i in xrange(0,len(self.selected)) : hit = self.selected[i] if len(self.selectionF) == 0 : if self.topUnion: print topUnion raise writer.write_start_result() if len(allvarsL) == 1: hit = (hit,) # Not an iterable - a parser bug? 
for j in xrange(0,len(allvarsL)) : if not len(hit) < j+1: writer.write_binding(allvarsL[j],hit[j]) writer.write_end_result() elif len(self.selectionF) == 1 : writer.write_start_result() writer.write_binding(self.selectionF[0],hit) writer.write_end_result() else: writer.write_start_result() for j in xrange(0,len(self.selectionF)) : writer.write_binding(self.selectionF[j],hit[j]) writer.write_end_result() writer.close() return out.getvalue() return retval else: raise Exception("Result format not implemented: %s"%format) rdflib-2.4.2/rdflib/sparql/sparqlOperators.py0000755000175000017500000003722211153616034020253 0ustar nachonacho# -*- coding: utf-8 -*- # # # $Date: 2005/11/04 14:06:36 $, by $Author: ivan $, $Revision: 1.1 $ # ## # API for the SPARQL operators. The operators (eg, 'lt') # return a function that can be added to the AND clause of a query. The parameters are either regular values # or query strings. The resulting function has one parameter (the binding directory), it can be combined with others or # be plugged to into an array of constraints. For example: #
#   constraints = [lt("?m",42)]
#
#

for checking whether "?m" is smaller than the (integer) value 42. It can be combined using the lambda function, for # example:

#
#    constraints = [lambda(b) : lt("?m",42)(b) or lt("?n",134)(b)]
# 
#

is the expression for:

#
#    AND ?m < 42 || ?n < 134
# 
#

(Clearly, the relative complexity is only on the API level; a SPARQL language parser that starts with a SPARQL # expression can map on this API).

# ## import sys, os, re from rdflib.Literal import Literal, _XSD_NS from rdflib.BNode import BNode from rdflib.URIRef import URIRef from rdflib import Variable from rdflib.sparql.graphPattern import _createResource from rdflib.sparql import _questChar, Debug ## # Boolean test whether this is a a query string or not # @param v the value to be checked # @return True if it is a query string def queryString(v) : return isinstance(v,basestring) and len(v) != 0 and v[0] == _questChar ## # Return the value in a literal, making on the fly conversion on datatype (using the datatypes that are implemented) # @param v the Literal to be converted # @return the result of the conversion. def getLiteralValue(v) : return v ## # Returns a value retrieval function. The return value can be plugged in a query; it would return # the value of param directly if param is a real value, and the run-time value if param is a query string of the type # "?xxx". If no binding is defined at the time of call, the return value is None # @param param query string, Unbound instance, or real value # @return a function taking one parameter (the binding directory) def getValue(param) : if isinstance(param,Variable) : unBound = True else : unBound = queryString(param) if not unBound : if isinstance(param,Literal) : value = getLiteralValue(param) elif callable(param): return param else : value = param return lambda(bindings): value def f(bindings) : if unBound : #@@note, param must be reassigned to avoid tricky issues of scope #see: http://docs.python.org/ref/naming.html _param = isinstance(param,Variable) and param or Variable(param[1:]) val = bindings[_param] if isinstance(val,Literal) : return getLiteralValue(val) else : return val else : return value return f ## # Operator for '<' # @param a value or query string # @param b value or query string # @return comparison method def lt(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) < fb(bindings) except: # raise # this 
is the case when the operators are incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f ## # Operator for '<=' # @param a value or query string # @param b value or query string # @return comparison method def le(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) <= fb(bindings) except : # this is the case when the operators are incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f ## # Operator for '>' # @param a value or query string # @param b value or query string # @return comparison method def gt(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) > fb(bindings) except : # this is the case when the operators are incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f ## # Operator for '>=' # @param a value or query string # @param b value or query string # @return comparison method def ge(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) >= fb(bindings) except : # this is the case when the operators are incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f ## # Operator for '=' # @param a value or query string # @param b value or query string # @return comparison method def eq(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) == fb(bindings) except : # this is the case when the operators are incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f ## # Operator for '!=' # @param a value or query string # @param b value or query string # @return comparison method def neq(a,b) : fa = getValue(a) fb = getValue(b) def f(bindings) : try : return fa(bindings) != fb(bindings) except : # this is the case when the operators are 
incompatible if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f def __getVariableName(v): if isinstance(v, Variable): return v elif queryString(v): return v[1:] else: return None ## # Is the variable bound # @param a value or query string # @return check method def bound(a) : v = __getVariableName(a) def f(bindings) : if v == None : return False if v in bindings : val = bindings[v] return not (val == None) else : return False return f ## # Is the variable bound to a URIRef # @param a value or query string # @return check method def isURI(a) : v = __getVariableName(a) def f(bindings) : if v == None : return False try : val = bindings[v] if val == None: return False else : return isinstance(val,URIRef) except : return False return f ## # Is the variable bound to a IRIRef (this is just an alias for URIRef) # @param a value or query string # @return check method def isIRI(a) : return isURI(a) ## # Is the variable bound to a Blank Node # @param a value or query string # @return check method def isBlank(a) : v = __getVariableName(a) def f(bindings) : if v == None : return False try : val = bindings[v] if val == None: return False else : return isinstance(val,BNode) except : return False return f ## # Is the variable bound to a Literal # @param a value or query string # @return check method def isLiteral(a) : v = __getVariableName(a) def f(bindings) : if v == None : return False try : val = bindings[v] if val == None: return False else : return isinstance(val,Literal) except : return False return f ## # Return the string version of a resource # @param a value or query string # @return check method def str(a) : v = __getVariableName(a) def f(bindings) : if v == None : return "" try : val = bindings[v] if val == None: return "" else : from __builtin__ import str as _str return _str(val) except : return "" return f ## # Return the lang value of a literal # @param a value or query string # @return check method def lang(a) : 
##
# Return the datatype URI of a literal
# @param a value or query string
# @return check method
def datatype(a) :
    v = __getVariableName(a)
    def f(bindings) :
        if v == None:
            if isinstance(a,Literal):
                return a.datatype
            else:
                raise TypeError(a)
        try :
            val = bindings[v]
            if val == None:
                # BUG FIX: the original *returned* TypeError(v) instead of
                # raising it; an unbound value is a type error per the spec.
                raise TypeError(v)
            elif isinstance(val,Literal) and not val.language:
                return val.datatype
            else:
                raise TypeError(val)
        except :
            raise TypeError(v)
    return f

##
# Is a resource on a collection. The operator can be used to check whether
# the 'item' is an element of the 'collection' (a.k.a. list). Both collection and item can
# be a real resource or a query string.
# @param collection is either a query string (that has to be bound by the query) or an RDFLib Resource
# representing the collection
# @param item is either a query string (that has to be bound by the query), an RDFLib Resource, or
# a data type value that is turned into a corresponding Literal (with possible datatype)
# that must be tested to be part of the collection
# @defreturn a function
def isOnCollection(collection,item, triplets) :
    """Generate a method that can be used as a global constraint in sparql to check whether
    the 'item' is an element of the 'collection' (a.k.a. list). Both collection
    and item can be a real resource or a query string. Furthermore, item might
    be a plain string, that is then turned into a literal run-time.
    The method returns an adapted method.

    BUG FIX: the Variable branch for 'item' used to assign 'itUnbund' (typo),
    so 'itUnbound' was undefined at check time -- a NameError on every query
    that used a Variable item.
    """
    # is the collection a run-time-bound value?
    if isinstance(collection,Variable) or queryString(collection) :
        collUnbound = True
    else :
        # if we got here, this is a valid collection resource
        collUnbound = False
    # is the item a run-time-bound value?
    if isinstance(item,Variable) or queryString(item) :
        queryItem = item
        itUnbound = True
    else :
        # Note that an exception is raised if the 'item' is invalid
        queryItem = _createResource(item)
        itUnbound = False
    def checkCollection(bindings) :
        try :
            coll = bindings[collection] if collUnbound else collection
            it = bindings[queryItem] if itUnbound else queryItem
            return it in triplets.items(coll)
        except :
            # this means that the binding is not available. But that also means that
            # the global constraint was used, for example, with the optional triplets;
            # not available binding means that the method is irrelevant for those
            # ie, it should not become a show-stopper, hence it returns True
            return True
    return checkCollection

def addOperator(args,combinationArg):
    """
    SPARQL numeric + operator implemented via Python: build the source text
    'sparqlOperators.getValue(a)(c) + sparqlOperators.getValue(b)(c) + ...'
    for later evaluation.

    @param args: sequence of operand expressions (stringified with %s)
    @param combinationArg: optional argument-expression appended as a call to
        each getValue(...); omitted when falsy
    @return: the joined source string
    """
    return ' + '.join(["sparqlOperators.getValue(%s)%s"%(i,combinationArg and "(%s)"%combinationArg or '') for i in args])

def XSDCast(source,target=None):
    """
    XSD Casting/Construction Support
    For now (this may be an issue since Literal doesn't override comparisons)
    it simply creates a Literal with the target datatype using the 'lexical'
    value of the source.

    @param source: value or query string to cast
    @param target: target XSD datatype URI
    @return: a function of one parameter (the binding dictionary)
    """
    sFunc = getValue(source)
    def f(bindings):
        rt = sFunc(bindings)
        if isinstance(rt,Literal) and rt.datatype == target:
            # Literal already has target datatype
            return rt
        else:
            return Literal(rt,datatype=target)
    return f
The regular expression language is defined in XQuery 1.0 and XPath 2.0 Functions and Operators section 7.6.1 Regular Expression Syntax """ a = getValue(item) b = getValue(pattern) if flag: cFlag = 0 usedFlags = [] #Maps XPath REGEX flags (http://www.w3.org/TR/xpath-functions/#flags) to Python's re flags for fChar,_flag in [('i',re.IGNORECASE),('s',re.DOTALL),('m',re.MULTILINE)]: if fChar in flag and fChar not in usedFlags: cFlag |= _flag usedFlags.append(fChar) def f1(bindings): try: return bool(re.compile(b(bindings),cFlag).search(a(bindings))) except: return False return f1 else: def f2(bindings): try: return bool(re.compile(b(bindings)).search(a(bindings))) except: return False return f2 def f(bindings): try: return bool(re.compile(a(bindings)).search(b(bindings))) except Exception,e: print e return False return f def EBV(a): """ * If the argument is a typed literal with a datatype of xsd:boolean, the EBV is the value of that argument. * If the argument is a plain literal or a typed literal with a datatype of xsd:string, the EBV is false if the operand value has zero length; otherwise the EBV is true. * If the argument is a numeric type or a typed literal with a datatype derived from a numeric type, the EBV is false if the operand value is NaN or is numerically equal to zero; otherwise the EBV is true. * All other arguments, including unbound arguments, produce a type error. 
""" fa = getValue(a) def f(bindings) : try : rt = fa(bindings) if isinstance(rt,Literal): if rt.datatype == _XSD_NS.boolean: ebv = rt.toPython() elif rt.datatype == _XSD_NS.string or rt.datatype is None: ebv = len(rt) > 0 else: pyRT = rt.toPython() if isinstance(pyRT,Literal): #Type error, see: http://www.w3.org/TR/rdf-sparql-query/#ebv raise TypeError("http://www.w3.org/TR/rdf-sparql-query/#ebv") else: ebv = pyRT != 0 return ebv else: print rt, type(rt) raise except Exception,e: if isinstance(e,KeyError): #see: http://www.w3.org/TR/rdf-sparql-query/#ebv raise TypeError("http://www.w3.org/TR/rdf-sparql-query/#ebv") # this is the case when the operators are incompatible raise if Debug : (typ,val,traceback) = sys.exc_info() sys.excepthook(typ,val,traceback) return False return f rdflib-2.4.2/rdflib/sparql/__init__.py0000755000175000017500000001335511153616034016612 0ustar nachonacho# -*- coding: utf-8 -*- # # # $Date: 2005/11/04 14:06:36 $, by $Author: ivan $, $Revision: 1.1 $ # The documentation of the module, hence the convention for the documentation of methods and classes, # is based on the epydoc tool. This tool parses Python source files # and generates API descriptions XHTML. # The latest release of epydoc (version 2.0) can be # downloaded from the SourceForge # download page. # # """ TODO: merge this first bit from sparql.sparql.py into rest of doc... updating all along the way. SPARQL implementation on top of RDFLib Implementation of the W3C SPARQL language (version April 2005). The basic class here is supposed to be a superclass of L{rdflib.sparql.sparqlGraph}; it has been separated only for a better maintainability. There is a separate U{description} for the functionalities. For a general description of the SPARQL API, see the separate, more complete U{description}. Variables, Imports ================== The top level (__init__.py) module of the Package imports the important classes. 
In other words, the user may choose to use the following imports only:: from rdflibUtils import myTripleStore from rdflibUtils import retrieveRDFFiles from rdflibUtils import SPARQLError from rdflibUtils import GraphPattern The module imports and/or creates some frequently used Namespaces, and these can then be imported by the user like:: from rdflibUtils import ns_rdf Finally, the package also has a set of convenience string defines for XML Schema datatypes (ie, the URI-s of the datatypes); ie, one can use:: from rdflibUtils import type_string from rdflibUtils import type_integer from rdflibUtils import type_long from rdflibUtils import type_double from rdflibUtils import type_float from rdflibUtils import type_decimal from rdflibUtils import type_dateTime from rdflibUtils import type_date from rdflibUtils import type_time from rdflibUtils import type_duration These are used, for example, in the sparql-p implementation. The three most important classes in RDFLib for the average user are Namespace, URIRef and Literal; these are also imported, so the user can also use, eg:: from rdflibUtils import Namespace, URIRef, Literal History ======= - Version 1.0: based on an earlier version of the SPARQL, first released implementation - Version 2.0: version based on the March 2005 SPARQL document, also a major change of the core code (introduction of the separate L{GraphPattern} class, etc). 
- Version 2.01: minor changes only: - switch to epydoc as a documentation tool, it gives a much better overview of the classes - addition of the SELECT * feature to sparql-p - Version 2.02: - added some methods to L{myTripleStore} to handle C{Alt} and C{Bag} the same way as C{Seq} - added also methods to I{add} collections and containers to the triple store, not only retrieve them - Version 2.1: adapted to the inclusion of the code into rdflib, thanks to Michel Pelletier - Version 2.2: added the sorting possibilities; introduced the Unbound class and have a better interface to patterns using this (in the BasicGraphPattern class) @author: U{Ivan Herman} @license: This software is available for use under the U{W3C Software License} @contact: Ivan Herman, ivan@ivan-herman.net @version: 2.2 """ from rdflib import URIRef DESCRIBE=URIRef('http://www.w3.org/TR/rdf-sparql-query/#describe') __version__ = "2.2" Debug = False # Note that the SPARQL draft allows the usage of a different query character, but I decided not to be that # generous, and keep to one only. I put it into a separate variable to avoid problems if the group decides # to change the syntax on that detail... _questChar = "?" def generateCollectionConstraint(triplets,collection,item) : """ Generate a function that can then be used as a global constaint in sparql to check whether the 'item' is an element of the 'collection' (a.k.a. list). Both collection and item can be a real resource or a query string. Furthermore, item might be a plain string, that is then turned into a literal run-time. The function returns an adapted filter method that can then be plugged into a sparql request. 
@param triplets: the L{SPARQLGraph} instance @param collection: is either a query string (that has to be bound by the query) or an RDFLib Resource representing the collection @param item: is either a query string (that has to be bound by the query) or an RDFLib Resource that must be tested to be part of the collection @rtype: a function suitable as a sparql filter @raises SPARQLError: if the collection or the item parameters are illegal (not valid resources for a collection or an object) """ return isOnCollection(collection,item, triples) ############################################################################################ class Processor(object): def __init__(self, graph): pass def query(self, strOrQuery, initBindings={}, initNs={}, DEBUG=False): pass from rdflib.exceptions import Error ## # SPARQL Error Exception (subclass of the RDFLib Exceptions) class SPARQLError(Error) : """Am SPARQL error has been detected""" def __init__(self,msg): Error.__init__(self, ("SPARQL Error: %s." % msg)) rdflib-2.4.2/rdflib/Statement.py0000644000175000017500000000042111153616035015501 0ustar nachonachofrom rdflib.Node import Node class Statement(Node, tuple): def __new__(cls, (subject, predicate, object), context): return tuple.__new__(cls, ((subject, predicate, object), context)) def __reduce__(self): return (Statement, (self[0], self[1])) rdflib-2.4.2/rdflib/syntax/0000755000175000017500000000000011204354476014521 5ustar nachonachordflib-2.4.2/rdflib/syntax/xml_names.py0000644000175000017500000000544311153616030017052 0ustar nachonacho# From: http://www.w3.org/TR/REC-xml#NT-CombiningChar # # * Name start characters must have one of the categories Ll, Lu, Lo, # Lt, Nl. # # * Name characters other than Name-start characters must have one of # the categories Mc, Me, Mn, Lm, or Nd. # # * Characters in the compatibility area (i.e. with character code # greater than #xF900 and less than #xFFFE) are not allowed in XML # names. 
# * Characters which have a font or compatibility decomposition (i.e. those
#   with a "compatibility formatting tag" in field 5 of the database --
#   marked by field 5 beginning with a "<") are not allowed.
#
# * The following characters are treated as name-start characters rather
#   than name characters, because the property file classifies them as
#   Alphabetic: [#x02BB-#x02C1], #x0559, #x06E5, #x06E6.
#
# * Characters #x20DD-#x20E0 are excluded (in accordance with Unicode
#   2.0, section 5.14).
#
# * Character #x00B7 is classified as an extender, because the property
#   list so identifies it.
#
# * Character #x0387 is added as a name character, because #x00B7 is its
#   canonical equivalent.
#
# * Characters ':' and '_' are allowed as name-start characters.
#
# * Characters '-' and '.' are allowed as name characters.

from unicodedata import category, decomposition

NAME_START_CATEGORIES = ["Ll", "Lu", "Lo", "Lt", "Nl"]
NAME_CATEGORIES = NAME_START_CATEGORIES + ["Mc", "Me", "Mn", "Lm", "Nd"]
ALLOWED_NAME_CHARS = [u"\u00B7", u"\u0387", u"-", u".", u"_"]

# http://www.w3.org/TR/REC-xml-names/#NT-NCName
#   [4] NCName     ::= (Letter | '_') (NCNameChar)*   /* An XML Name, minus the ":" */
#   [5] NCNameChar ::= Letter | Digit | '.' | '-' | '_' | CombiningChar | Extender


def is_ncname(name):
    """Return 1 if `name` is a valid XML NCName, 0 otherwise.

    Robustness fix: the original indexed ``name[0]`` unconditionally and
    therefore raised IndexError on the empty string; an empty string is
    simply not an NCName, so 0 is returned instead.
    """
    if not name:
        return 0
    first = name[0]
    # An NCName must start with '_' or a letter-class character.
    if first != "_" and category(first) not in NAME_START_CATEGORIES:
        return 0
    for c in name[1:]:
        if category(c) not in NAME_CATEGORIES and c not in ALLOWED_NAME_CHARS:
            return 0
        # if in compatibility area
        # if decomposition(c) != '':
        #     return 0
    return 1


XMLNS = "http://www.w3.org/XML/1998/namespace"


def split_uri(uri):
    """Split `uri` into a (namespace, localname) pair.

    Scans backwards from the end of the URI for the longest suffix whose
    characters are all XML name characters and whose first character is a
    name-start character; everything before that suffix is the namespace.

    @raises Exception: if no valid split point exists.
    """
    if uri.startswith(XMLNS):
        return (XMLNS, uri.split(XMLNS)[1])
    length = len(uri)
    for i in range(0, length):
        c = uri[-i - 1]
        if not category(c) in NAME_CATEGORIES:
            if c in ALLOWED_NAME_CHARS:
                continue
            # Found the first non-name character from the right; look for
            # the earliest name-start character after it.
            for j in range(-1 - i, length):
                if category(uri[j]) in NAME_START_CATEGORIES or uri[j] == "_":
                    ns = uri[:j]
                    if not ns:
                        break
                    ln = uri[j:]
                    return (ns, ln)
            break
    raise Exception("Can't split '%s'" % uri)
isinstance(rdfTerm,Variable): return "?%s"%rdfTerm else: return "<%s>"%rdfTerm prefix = self.store.prefix(namespace) if prefix is None and isinstance(rdfTerm,Variable): return "?%s"%rdfTerm elif prefix is None: return "<%s>"%rdfTerm else: qNameParts = self.compute_qname(rdfTerm) return ':'.join([qNameParts[0],qNameParts[-1]]) def compute_qname(self, uri): if not uri in self.__cache: namespace, name = split_uri(uri) namespace = URIRef(namespace) prefix = self.store.prefix(namespace) if prefix is None: prefix = "_%s" % len(list(self.store.namespaces())) self.bind(prefix, namespace) self.__cache[uri] = (prefix, namespace, name) return self.__cache[uri] def bind(self, prefix, namespace, override=True): namespace = URIRef(namespace) # When documenting explain that override only applies in what cases if prefix is None: prefix = '' bound_namespace = self.store.namespace(prefix) if bound_namespace and bound_namespace!=namespace: # prefix already in use for different namespace # # append number to end of prefix until we find one # that's not in use. 
if not prefix: prefix = "default" num = 1 while 1: new_prefix = "%s%s" % (prefix, num) if not self.store.namespace(new_prefix): break num +=1 self.store.bind(new_prefix, namespace) else: bound_prefix = self.store.prefix(namespace) if bound_prefix is None: self.store.bind(prefix, namespace) elif bound_prefix == prefix: pass # already bound else: if override or bound_prefix.startswith("_"): # or a generated prefix self.store.bind(prefix, namespace) def namespaces(self): for prefix, namespace in self.store.namespaces(): namespace = URIRef(namespace) yield prefix, namespace def absolutize(self, uri, defrag=1): base = urljoin("file:", pathname2url(os.getcwd())) result = urljoin("%s/" % base, uri, allow_fragments=not defrag) if defrag: result = urldefrag(result)[0] if not defrag: if uri and uri[-1]=="#" and result[-1]!="#": result = "%s#" % result return URIRef(result) rdflib-2.4.2/rdflib/syntax/serializer.py0000644000175000017500000000330111203343450017226 0ustar nachonachoimport tempfile, shutil, os from threading import Lock from urlparse import urlparse try: from cStringIO import StringIO except ImportError: from StringIO import StringIO class Serializer(object): def __init__(self, serializer): self.serializer = serializer self.__save_lock = Lock() def _get_store(self): return self.serializer.store def _set_store(self, store): self.serializer.store = store store = property(_get_store, _set_store) def serialize(self, destination=None, format="xml", base=None, encoding=None, **args): if destination is None: stream = StringIO() self.serializer.serialize(stream, base=base, encoding=encoding, **args) return stream.getvalue() if hasattr(destination, "write"): stream = destination self.serializer.serialize(stream, base=base, encoding=encoding, **args) else: location = destination try: self.__save_lock.acquire() scheme, netloc, path, params, query, fragment = urlparse(location) if netloc!="": print "WARNING: not saving as location is not a local file reference" return name = 
tempfile.mktemp() stream = open(name, 'wb') self.serializer.serialize(stream, base=base, encoding=encoding, **args) stream.close() if hasattr(shutil,"move"): shutil.move(name, path) else: shutil.copy(name, path) os.remove(name) finally: self.__save_lock.release() rdflib-2.4.2/rdflib/syntax/parsers/0000755000175000017500000000000011204354476016200 5ustar nachonachordflib-2.4.2/rdflib/syntax/parsers/n3p/0000755000175000017500000000000011204354476016700 5ustar nachonachordflib-2.4.2/rdflib/syntax/parsers/n3p/uripath.py0000755000175000017500000003526411153616027020737 0ustar nachonacho#!/bin/env python """ Uniform Resource Identifier (URI) path manipulation, above the access layer The name of this module and the functions are somewhat arbitrary; they hark to other parts of the python library; e.g. uripath.join() is somewhat like os.path.join(). REFERENCES Uniform Resource Identifiers (URI): Generic Syntax http://www.ietf.org/rfc/rfc2396.txt The Web Model: Information hiding and URI syntax (Jan 98) http://www.w3.org/DesignIssues/Model.html URI API design [was: URI Test Suite] Dan Connolly (Sun, Aug 12 2001) http://lists.w3.org/Archives/Public/uri/2001Aug/0021.html """ __version__ = "$Id: uripath.py,v 1.16 2004/03/21 04:24:35 timbl Exp $" from string import find, rfind, index def splitFrag(uriref): """split a URI reference between the fragment and the rest. Punctuation is thrown away. e.g. >>> splitFrag("abc#def") ('abc', 'def') >>> splitFrag("abcdef") ('abcdef', None) """ i = rfind(uriref, "#") if i>= 0: return uriref[:i], uriref[i+1:] else: return uriref, None def splitFragP(uriref, punct=0): """split a URI reference before the fragment Punctuation is kept. e.g. 
>>> splitFragP("abc#def") ('abc', '#def') >>> splitFragP("abcdef") ('abcdef', '') """ i = rfind(uriref, "#") if i>= 0: return uriref[:i], uriref[i:] else: return uriref, '' def join(here, there): """join an absolute URI and URI reference (non-ascii characters are supported/doctested; haven't checked the details of the IRI spec though) here is assumed to be absolute. there is URI reference. >>> join('http://example/x/y/z', '../abc') 'http://example/x/abc' Raise ValueError if there uses relative path syntax but here has no hierarchical path. >>> join('mid:foo@example', '../foo') Traceback (most recent call last): raise ValueError, here ValueError: Base has no slash after colon - with relative '../foo'. We grok IRIs >>> len(u'Andr\\xe9') 5 >>> join('http://example.org/', u'#Andr\\xe9') u'http://example.org/#Andr\\xe9' """ assert(find(here, "#") < 0), "Base may not contain hash: '%s'"% here # caller must splitFrag (why?) slashl = find(there, '/') colonl = find(there, ':') # join(base, 'foo:/') -- absolute if colonl >= 0 and (slashl < 0 or colonl < slashl): return there bcolonl = find(here, ':') assert(bcolonl >= 0), "Base uri '%s' is not absolute" % here # else it's not absolute # join('mid:foo@example', '../foo') bzzt if here[bcolonl+1:bcolonl+2] <> '/': raise ValueError ("Base <%s> has no slash after colon - with relative '%s'." 
%(here, there)) if here[bcolonl+1:bcolonl+3] == '//': bpath = find(here, '/', bcolonl+3) else: bpath = bcolonl+1 # join('http://xyz', 'foo') if bpath < 0: bpath = len(here) here = here + '/' # join('http://xyz/', '//abc') => 'http://abc' if there[:2] == '//': return here[:bcolonl+1] + there # join('http://xyz/', '/abc') => 'http://xyz/abc' if there[:1] == '/': return here[:bpath] + there slashr = rfind(here, '/') path, frag = splitFragP(there) if not path: return here + frag while 1: if path[:2] == './': path = path[2:] if path == '.': path = '' elif path[:3] == '../' or path == '..': path = path[3:] i = rfind(here, '/', bpath, slashr) if i >= 0: here = here[:i+1] slashr = i else: break return here[:slashr+1] + path + frag import re import string commonHost = re.compile(r'^[-_a-zA-Z0-9.]+:(//[^/]*)?/[^/]*$') def refTo(base, uri): """figure out a relative URI reference from base to uri >>> refTo('http://example/x/y/z', 'http://example/x/abc') '../abc' >>> refTo('file:/ex/x/y', 'file:/ex/x/q/r#s') 'q/r#s' >>> refTo(None, 'http://ex/x/y') 'http://ex/x/y' >>> refTo('http://ex/x/y', 'http://ex/x/y') '' Note the relationship between refTo and join: join(x, refTo(x, y)) == y which points out certain strings which cannot be URIs. e.g. >>> x='http://ex/x/y';y='http://ex/x/q:r';join(x, refTo(x, y)) == y 0 So 'http://ex/x/q:r' is not a URI. Use 'http://ex/x/q%3ar' instead: >>> x='http://ex/x/y';y='http://ex/x/q%3ar';join(x, refTo(x, y)) == y 1 This one checks that it uses a root-realtive one where that is all they share. Now uses root-relative where no path is shared. This is a matter of taste but tends to give more resilience IMHO -- and shorter paths Note that base may be None, meaning no base. In some situations, there just ain't a base. Slife. In these cases, relTo returns the absolute value. The axiom abs(,rel(b,x))=x still holds. This saves people having to set the base to "bogus:". 
>>> refTo('http://ex/x/y/z', 'http://ex/r') '/r' """ # assert base # don't mask bugs -danc # not a bug. -tim if not base: return uri if base == uri: return "" # Find how many path segments in common i=0 while i0 and uri[i-1] != '/' : i=i-1 # scan for slash if i < 3: return uri # No way. if string.find(base, "//", i-2)>0 \ or string.find(uri, "//", i-2)>0: return uri # An unshared "//" if string.find(base, ":", i)>0: return uri # An unshared ":" n = string.count(base, "/", i) if n == 0 and i file:/some/dir/#blort # # Revision 1.4 2002/08/07 14:32:21 timbl # uripath changes. passes 51 general tests and 25 loopback tests # # Revision 1.3 2002/08/06 01:36:09 connolly # cleanup: diagnostic interface, relative/absolute uri handling # # Revision 1.2 2002/03/15 23:53:02 connolly # handle no-auth case # # Revision 1.1 2002/02/19 22:52:42 connolly # renamed uritools.py to uripath.py # # Revision 1.2 2002/02/18 07:33:51 connolly # pathTo seems to work # rdflib-2.4.2/rdflib/syntax/parsers/n3p/n3meta.py0000755000175000017500000015463511153616027020456 0ustar nachonacho#!/usr/bin/env python """n3meta - For use with n3p.py.""" # Automatically generated by pkltopy.py import re branches = {u'_:jcOJHCYs16': {u',': [u',', 'http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs16'], u'.': [], u'}': []}, u'_:jcOJHCYs20': {u',': [u',', 'http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs20'], u'.': [], u'}': []}, u'_:jcOJHCYs33': {u'.': [], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs16'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs16'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs16'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs16'], u'}': []}, u'_:jcOJHCYs36': {u'.': [], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs20'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs20'], u'_': 
['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs20'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol', '_:jcOJHCYs20'], u'}': []}, u'_:jcOJHCYs44': {u'.': [], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#barename', '_:jcOJHCYs9'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#barename', '_:jcOJHCYs9'], u'}': []}, u'_:jcOJHCYs9': {u',': [u',', 'http://www.w3.org/2000/10/swap/grammar/n3#barename', '_:jcOJHCYs9'], u'.': [], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#declaration': {u'@keywords': [u'@keywords', '_:jcOJHCYs44'], u'@prefix': [u'@prefix', 'http://www.w3.org/2000/10/swap/grammar/n3#qname', 'http://www.w3.org/2000/10/swap/grammar/n3#explicituri']}, u'http://www.w3.org/2000/10/swap/grammar/n3#document': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@EOFDUMMY': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@forAll': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 
'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@forSome': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@keywords': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@prefix': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#statements_optional', 'http://www.w3.org/2000/10/swap/grammar/bnf#eof']}, u'http://www.w3.org/2000/10/swap/grammar/n3#dtlang': {u'!': [], u'"': [], u'(': [], u')': [], u'+': [], u',': [], u'-': [], u'.': [], u'0': [], u':': [], u';': [], u'<': [], u'<=': [], u'=': [], u'=>': [], u'?': [], u'@': [u'@', 'http://www.w3.org/2000/10/swap/grammar/n3#langcode'], u'@a': [], u'@has': [], u'@is': [], u'@of': [], u'@this': [], u'[': [], u']': [], u'^': [], u'^^': [u'^^', 'http://www.w3.org/2000/10/swap/grammar/n3#symbol'], u'_': [], u'a': [], u'{': [], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#existential': {u'@forSome': [u'@forSome', '_:jcOJHCYs36']}, u'http://www.w3.org/2000/10/swap/grammar/n3#formulacontent': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'0': 
['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'@forAll': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'@forSome': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'@keywords': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'@prefix': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#literal': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#string', 'http://www.w3.org/2000/10/swap/grammar/n3#dtlang']}, u'http://www.w3.org/2000/10/swap/grammar/n3#node': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#literal'], u'(': [u'(', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist', u')'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#numericliteral'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#numericliteral'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#numericliteral'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#variable'], u'@this': [u'@this'], u'[': [u'[', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist', u']'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#symbol'], u'{': [u'{', 'http://www.w3.org/2000/10/swap/grammar/n3#formulacontent', u'}']}, 
u'http://www.w3.org/2000/10/swap/grammar/n3#object': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#path']}, u'http://www.w3.org/2000/10/swap/grammar/n3#objecttail': {u',': [u',', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail'], u'.': [], u';': [], u']': [], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#path': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'@this': 
['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#node', 'http://www.w3.org/2000/10/swap/grammar/n3#pathtail']}, u'http://www.w3.org/2000/10/swap/grammar/n3#pathlist': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u')': [], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist'], u'{': 
['http://www.w3.org/2000/10/swap/grammar/n3#path', 'http://www.w3.org/2000/10/swap/grammar/n3#pathlist']}, u'http://www.w3.org/2000/10/swap/grammar/n3#pathtail': {u'!': [u'!', 'http://www.w3.org/2000/10/swap/grammar/n3#path'], u'"': [], u'(': [], u')': [], u'+': [], u',': [], u'-': [], u'.': [], u'0': [], u':': [], u';': [], u'<': [], u'<=': [], u'=': [], u'=>': [], u'?': [], u'@a': [], u'@has': [], u'@is': [], u'@of': [], u'@this': [], u'[': [], u']': [], u'^': [u'^', 'http://www.w3.org/2000/10/swap/grammar/n3#path'], u'_': [], u'a': [], u'{': [], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#propertylist': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'.': [], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 
'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'<=': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'=': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'=>': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'@a': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'@has': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'@is': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 
'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u']': [], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#verb', 'http://www.w3.org/2000/10/swap/grammar/n3#object', 'http://www.w3.org/2000/10/swap/grammar/n3#objecttail', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail'], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#propertylisttail': {u'.': [], u';': [u';', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u']': [], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 
'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#subject', 'http://www.w3.org/2000/10/swap/grammar/n3#propertylist']}, u'http://www.w3.org/2000/10/swap/grammar/n3#statement': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'@forAll': ['http://www.w3.org/2000/10/swap/grammar/n3#universal'], u'@forSome': ['http://www.w3.org/2000/10/swap/grammar/n3#existential'], u'@keywords': ['http://www.w3.org/2000/10/swap/grammar/n3#declaration'], u'@prefix': ['http://www.w3.org/2000/10/swap/grammar/n3#declaration'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'_': 
['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#simpleStatement']}, u'http://www.w3.org/2000/10/swap/grammar/n3#statementlist': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'@forAll': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'@forSome': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'@keywords': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'@prefix': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 
'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', 'http://www.w3.org/2000/10/swap/grammar/n3#statementtail'], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'@EOFDUMMY': [], u'@forAll': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'@forSome': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'@keywords': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 
'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'@prefix': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#statement', u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statements_optional']}, u'http://www.w3.org/2000/10/swap/grammar/n3#statementtail': {u'.': [u'.', 'http://www.w3.org/2000/10/swap/grammar/n3#statementlist'], u'}': []}, u'http://www.w3.org/2000/10/swap/grammar/n3#subject': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#path']}, u'http://www.w3.org/2000/10/swap/grammar/n3#symbol': {u':': ['http://www.w3.org/2000/10/swap/grammar/n3#qname'], u'<': 
['http://www.w3.org/2000/10/swap/grammar/n3#explicituri'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#qname'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#qname']}, u'http://www.w3.org/2000/10/swap/grammar/n3#universal': {u'@forAll': [u'@forAll', '_:jcOJHCYs33']}, u'http://www.w3.org/2000/10/swap/grammar/n3#verb': {u'"': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'(': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'+': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'-': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'0': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u':': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'<': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'<=': [u'<='], u'=': [u'='], u'=>': [u'=>'], u'?': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'@a': [u'@a'], u'@has': [u'@has', 'http://www.w3.org/2000/10/swap/grammar/n3#path'], u'@is': [u'@is', 'http://www.w3.org/2000/10/swap/grammar/n3#path', u'@of'], u'@this': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'[': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'_': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'a': ['http://www.w3.org/2000/10/swap/grammar/n3#path'], u'{': ['http://www.w3.org/2000/10/swap/grammar/n3#path']}} regexps = { u'http://www.w3.org/2000/10/swap/grammar/n3#barename': re.compile(u'[a-zA-Z_][a-zA-Z0-9_]*'), u'http://www.w3.org/2000/10/swap/grammar/n3#variable': re.compile(u'\\?[a-zA-Z_][a-zA-Z0-9_]*'), u'http://www.w3.org/2000/10/swap/grammar/n3#qname': re.compile(u'(([a-zA-Z_][a-zA-Z0-9_]*)?:)?([a-zA-Z_][a-zA-Z0-9_-]*)?'), u'http://www.w3.org/2000/10/swap/grammar/n3#string': re.compile(u'("""[^"\\\\]*(?:(?:\\\\.|"(?!""))[^"\\\\]*)*""")|("[^"\\\\]*(?:\\\\.[^"\\\\]*)*")'), u'http://www.w3.org/2000/10/swap/grammar/n3#explicituri': re.compile(u'<[^>]*>'), u'http://www.w3.org/2000/10/swap/grammar/n3#langcode': re.compile(u'[a-z]+(-[a-z0-9]+)*'), 
#!/usr/bin/env python
"""
N3P - An N3 Parser using n3.n3
Author: Sean B. Palmer, inamidst.com
Licence: GPL 2; share and enjoy!
License: http://www.w3.org/Consortium/Legal/copyright-software
Documentation: http://inamidst.com/n3p/
Derived from:
   http://www.w3.org/2000/10/swap/grammar/predictiveParser.py
   - predictiveParser.py, Tim Berners-Lee, 2004
Issues:
   http://lists.w3.org/Archives/Public/public-cwm-bugs/2005Jan/0006
   http://lists.w3.org/Archives/Public/public-cwm-talk/2005JanMar/0015
"""

import sys, os, re, urllib
import cPickle as pickle

# Python 2.3 compatibility: fall back to sets.Set when the set builtin
# is missing.
try: set()
except NameError:
    from sets import Set as set

# The grammar tables (branch/first-set dict and token regexps) come either
# from the generated n3meta module or, failing that, from a pickled copy
# found on sys.path.
try:
    import n3meta
    branches = n3meta.branches
    regexps = n3meta.regexps
except ImportError:
    for path in sys.path:
        fn = os.path.join(path, 'n3meta.pkl')
        if os.path.isfile(fn):
            f = open(fn, 'rb')
            n3meta = pickle.load(f)
            f.close()
            branches = n3meta['branches']
            regexps = n3meta['regexps']
            break

# Start symbol of the N3 grammar.
start = 'http://www.w3.org/2000/10/swap/grammar/n3#document'

# Whitespace / comment skipper: spaces, tabs, newlines and '#' comments.
r_whitespace = re.compile(r'[ \t\r\n]*(?:(?:#[^\n]*)?\r?(?:$|\n))?')
singleCharacterSelectors = "\t\r\n !\"#$%&'()*.,+/;<=>?[\\]^`{|}~"
r_qname = re.compile(r'([A-Za-z0-9_:]*)')
r_name = re.compile(r'([A-Za-z0-9_]*)')
notQNameChars = singleCharacterSelectors + "@"
notNameChars = notQNameChars + ":"

def abbr(prodURI):
    """Return the fragment part of a production URI (text after '#')."""
    return prodURI.split('#').pop()

class N3Parser(object):
    """Predictive (LL(1)) parser for N3, driven by the `branches` table.

    Emits onStart/onFinish/onToken events; subclasses override those to
    build something useful out of the parse.
    """

    def __init__(self, uri, branches, regexps):
        # 'nowhere' means the caller supplies self.data itself; stdin is
        # special-cased; anything else is fetched with urllib.
        if uri == 'nowhere': pass
        elif (uri != 'file:///dev/stdin'):
            u = urllib.urlopen(uri)
            self.data = u.read()
            u.close()
        else: self.data = sys.stdin.read()
        self.pos = 0                     # current offset into self.data
        self.branches = branches         # prod URI -> {token: expansion}
        self.regexps = regexps           # terminal prod URI -> compiled regexp
        self.keywordMode = False         # True while reading an @keywords list
        self.keywords = set(("a", "is", "of", "this", "has"))
        self.productions = []            # stack of open production names
        self.memo = {}                   # pos -> token (see token())

    def parse(self, prod):
        """Parse self.data starting from production `prod`.

        todoStack holds [production, remaining-terms] pairs; terms are
        expanded depth-first according to the predictive branch table.
        """
        todoStack = [[prod, None]]
        while todoStack:
            if todoStack[-1][1] is None:
                todoStack[-1][1] = []
                tok = self.token()
                # Got an opened production
                self.onStart(abbr(todoStack[-1][0]))
                if not tok:
                    return tok # EOF
                prodBranch = self.branches[todoStack[-1][0]]
                sequence = prodBranch.get(tok, None)
                if sequence is None:
                    #print >> sys.stderr, 'prodBranch', prodBranch
                    msg = "Found %s when expecting a %s . todoStack=%r"
                    args = (tok, todoStack[-1][0], todoStack)
                    raise ValueError, (msg % args)
                for term in sequence:
                    todoStack[-1][1].append(term)
            while todoStack[-1][1]:
                term = todoStack[-1][1].pop(0)
                if isinstance(term, unicode):
                    # Literal terminal: must match the input verbatim,
                    # or (for '@keyword' terms) match without the '@'.
                    j = self.pos + len(term)
                    word = self.data[self.pos:j]
                    if word == term:
                        self.onToken(term, word)
                        self.pos = j
                    elif '@' + word[:-1] == term:
                        self.onToken(term, word[:-1])
                        self.pos = j - 1
                    else:
                        msg = "Found %s; %s expected"
                        args = (self.data[self.pos:self.pos+10], term)
                        raise ValueError, (msg % args)
                elif not self.regexps.has_key(term):
                    # Non-terminal: push a fresh production onto the stack.
                    todoStack.append([term, None])
                    continue
                else:
                    # Regexp terminal: must match at the current position.
                    regexp = self.regexps[term]
                    m = regexp.match(self.data, self.pos)
                    if not m:
                        msg = "Token: %r should match %s"
                        args = (self.data[self.pos:self.pos+10], regexp.pattern)
                        raise ValueError, (msg % args)
                    end = m.end()
                    self.onToken(abbr(term), self.data[self.pos:end])
                    self.pos = end
                self.token()
            # Close every production whose term list is exhausted.
            while todoStack[-1][1] == []:
                todoStack.pop()
                self.onFinish()

    def token(self):
        """Memoizer for getToken."""
        if self.memo.has_key(self.pos):
            return self.memo[self.pos]
        result = self.getToken()
        pos = self.pos
        self.memo[pos] = result
        return result

    def getToken(self):
        """Return the selector token for the input at self.pos.

        The selector is what the branch tables key on: a single
        character, a two-character operator, '@name' for keywords, '0'
        for any numeric literal, or 'a' for a generic name/qname.
        Does not consume input (except skipped whitespace).
        """
        self.whitespace()
        if self.pos == len(self.data):
            return '' # EOF!
        ch2 = self.data[self.pos:self.pos+2]
        for double in ('=>', '<=', '^^'):
            if ch2 == double: return double
        ch = self.data[self.pos]
        # A '.' terminates a pending @keywords declaration.
        if ch == '.' and self.keywordMode:
            self.keywordMode = False
        if ch in singleCharacterSelectors + '"':
            return ch
        elif ch in '+-0123456789':
            return '0'
        if ch == '@':
            # '@' directly after a '"' is a language tag, not a keyword.
            if self.pos and (self.data[self.pos-1] == '"'):
                return '@'
            name = r_name.match(self.data, self.pos + 1).group(1)
            if name == 'keywords':
                self.keywords = set()
                self.keywordMode = True
            return '@' + name
        word = r_qname.match(self.data, self.pos).group(1)
        if self.keywordMode:
            self.keywords.add(word)
        elif word in self.keywords:
            if word == 'keywords':
                self.keywords = set()
                self.keywordMode = True
            return '@' + word # implicit keyword
        return 'a'

    def whitespace(self):
        """Advance self.pos past whitespace and '#' comments."""
        while True:
            end = r_whitespace.match(self.data, self.pos).end()
            if end <= self.pos: break
            self.pos = end

    # Default event handlers: print an indented parse trace.
    def onStart(self, prod):
        print (' ' * len(self.productions)) + prod
        self.productions.append(prod)

    def onFinish(self):
        prod = self.productions.pop()
        print (' ' * len(self.productions)) + '/' + prod

    def onToken(self, prod, tok):
        print (' ' * len(self.productions)) + prod, tok

def main(argv=None):
    """Command-line entry point: parse the URI given as argv[1]."""
    if argv is None:
        argv = sys.argv
    if len(argv) == 2:
        p = N3Parser(argv[1], branches, regexps)
        p.parse(start)

if __name__=="__main__":
    main()
# n3proc - An N3 Processor using n3.n3
# Author: Sean B. Palmer, inamidst.com
# Licence: GPL 2; share and enjoy!
# License: http://www.w3.org/Consortium/Legal/copyright-software
# Documentation: http://inamidst.com/n3p/
# usage: %prog [options]

from rdflib import URIRef, BNode, Literal, Variable, Namespace
from rdflib.Graph import QuotedGraph
import sys, os.path, re, time, urllib
import n3p

# Prefer cwm's uripath.join (correct RFC 3986 resolution); fall back to
# the stdlib urljoin with a warning.
try:
    from uripath import join as urijoin
except ImportError:
    print >> sys.stderr, "uripath.py not found"
    from urlparse import urljoin as urijoin

RDF = Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
OWL = Namespace('http://www.w3.org/2002/07/owl#')
LOG = Namespace('http://www.w3.org/2000/10/swap/log#')
XSD = Namespace('http://www.w3.org/2001/XMLSchema#')
N3R = Namespace('http://www.w3.org/2000/10/swap/reify#')

# Lowercase hex digits after \u / \U escapes (to be uppercased by quote).
r_unilower = re.compile(r'(?<=\\u)([0-9a-f]{4})|(?<=\\U)([0-9a-f]{8})')
# Control characters and high bytes that must be escaped in N-Triples.
r_hibyte = re.compile(r'[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F-\xFF]')

def quote(s):
    """Escape `s` for output as an N-Triples string literal body."""
    if not isinstance(s, unicode):
        s = unicode(s, 'utf-8') # @@ not required?
    # Only escape backslashes ourselves if unicode-escape doesn't.
    if not (u'\\'.encode('unicode-escape') == '\\\\'):
        s = s.replace('\\', r'\\')
    s = s.replace('"', r'\"')
    # s = s.replace(r'\\"', r'\"')
    s = r_hibyte.sub(lambda m: '\\u00%02X' % ord(m.group(0)), s)
    s = s.encode('unicode-escape')
    # unicode-escape emits lowercase hex; N-Triples wants uppercase.
    s = r_unilower.sub(lambda m: (m.group(1) or m.group(2)).upper(), s)
    return str(s)

# Single-character escape table and the escape-matching regexps used by
# unquote below.
quot = {'t': '\t', 'n': '\n', 'r': '\r', '"': '"', '\\': '\\'}
r_quot = re.compile(r'\\(t|n|r|"|\\)')
r_uniquot = re.compile(r'\\u([0-9A-F]{4})|\\U([0-9A-F]{8})')

class ParseError(Exception):
    pass

def unquote(s, triplequoted=False, r_safe = re.compile(ur'([\x20\x21\x23-\x5B\x5D-\x7E\u00A0-\uFFFF]+)')):
    """Unquote an N-Triples string.

    With triplequoted=True, bare newlines and '"' characters are also
    accepted (as inside a \"\"\"...\"\"\" literal).
    Derived from: http://inamidst.com/proj/rdf/ntriples.py
    """
    result = []
    while s:
        # Runs of characters that need no unescaping.
        m = r_safe.match(s)
        if m:
            s = s[m.end():]
            result.append(m.group(1))
            continue
        # Single-character escapes: \t \n \r \" \\
        m = r_quot.match(s)
        if m:
            s = s[2:]
            result.append(quot[m.group(1)])
            continue
        # \uXXXX / \UXXXXXXXX escapes.
        m = r_uniquot.match(s)
        if m:
            s = s[m.end():]
            u, U = m.groups()
            codepoint = int(u or U, 16)
            if codepoint > 0x10FFFF:
                raise ParseError("Disallowed codepoint: %08X" % codepoint)
            result.append(unichr(codepoint))
        elif s.startswith('\\'):
            raise ParseError("Illegal escape at: %s..." % s[:10])
        elif triplequoted and (s[0] in '\n"'):
            result.append(s[0])
            s = s[1:]
        else:
            raise ParseError("Illegal literal character: %r" % s[0])
    return unicode(''.join(result))

# Grammar tables re-exported from n3p.
branches = n3p.branches
regexps = n3p.regexps
start = n3p.start

class N3Processor(n3p.N3Parser):
    """N3 parser subclass that turns parse events into sink calls."""

    def __init__(self, uri, sink, baseURI=False):
        super(N3Processor, self).__init__(uri, branches, regexps)
        # baseURI=False means "use the document URI as base".
        if baseURI is False:
            self.baseURI = uri
        else:
            self.baseURI = baseURI
        self.sink = sink
        self.bindings = {'': urijoin(self.baseURI, '#')}  # prefix -> namespace
        self.counter = 0          # generated-name counter (formulae, vars)
        self.prefix = False       # [prefix, uri] while inside @prefix
        self.userkeys = False     # True once @keywords was seen
        self.anonsubj = False     # tracks '[' nodes used as subjects
        self.litinfo = False      # {content, language, datatype} while in literal
        self.forAll = False       # accumulating @forAll terms, or False
        self.forSome = False      # accumulating @forSome terms, or False
        self.universals = {}      # URI -> (formula, Variable)
        self.existentials = {}    # URI -> (formula, BNode)
        self.formulae = []        # stack of open formulae
        self.labels = []          # stack of formula labels
        self.mode = []            # stack of 'triple' / 'list' contexts
        self.triples = []         # stack of partial [s, p, o] triples
        self.pathmode = 'path'    # 'path' or 'pathtail'
        self.paths = []           # stack of path-expression term lists
        self.lists = []           # stack of open '(...)' collections
        self.bnodes = {}          # '_:' label -> BNode

    def parse(self, start=start):
        super(N3Processor, self).parse(start)

    # Event dispatch: route each production event to an optional
    # <production>Start / <production>Finish / <parent>Token method.
    def onStart(self, prod):
        self.productions.append(prod)
        handler = prod + 'Start'
        if hasattr(self, handler):
            getattr(self, handler)(prod)

    def onFinish(self):
        prod = self.productions.pop()
        handler = prod + 'Finish'
        if hasattr(self, handler):
            getattr(self, handler)()

    def onToken(self, prod, tok):
        if self.productions:
            parentProd = self.productions[-1]
            handler = parentProd + 'Token'
            if hasattr(self, handler):
                getattr(self, handler)(prod, tok)
        else:
            raise Exception("Token has no parent production.")
    def documentStart(self, prod):
        # The document's root formula is the sink's graph.
        formula = self.sink.graph
        self.formulae.append(formula)
        self.sink.start(formula)

    def declarationToken(self, prod, tok):
        # Collect the pieces of a @prefix / @keywords declaration.
        if prod == '@prefix':
            self.prefix = []
        elif prod == '@keywords':
            self.userkeys = True # bah
        elif (self.prefix is not False) and prod == 'qname':
            self.prefix.append(tok[:-1])     # drop the trailing ':'
        elif prod == 'explicituri':
            self.prefix.append(tok[1:-1])    # drop the '<' and '>'

    def declarationFinish(self):
        if self.prefix:
            self.bindings[self.prefix[0]] = self.prefix[1]
            self.prefix = False

    def universalStart(self, prod):
        self.forAll = []

    def universalFinish(self):
        # Each @forAll term becomes a fresh Variable quantified in the
        # current formula.
        for term in self.forAll:
            v = self.univar('var')
            self.universals[term] = (self.formulae[-1], v)
            self.sink.quantify(self.formulae[-1], v)
        self.forAll = False

    def existentialStart(self, prod):
        self.forSome = []

    def existentialFinish(self):
        # Each @forSome term becomes a fresh BNode quantified in the
        # current formula.
        for term in self.forSome:
            b = BNode()
            self.existentials[term] = (self.formulae[-1], b)
            self.sink.quantify(self.formulae[-1], b)
        self.forSome = False

    def simpleStatementStart(self, prod):
        self.triples.append([])

    def simpleStatementFinish(self):
        if self.triples:
            self.triples.pop()

    def pathStart(self, prod):
        # p = self.paths
        # if not (p and p[-1] and (p[-1][-1] in '!^')):
        # Open a new path term list unless we are continuing a pathtail.
        if (not self.paths) or (self.pathmode == 'path'):
            self.paths.append([])
            self.pathcounter = 1
        else:
            self.pathcounter += 1
        self.pathmode = 'path'

    def pathtailStart(self, prod):
        self.pathcounter += 1
        self.pathmode = 'pathtail'

    def pathtailToken(self, prod, tok):
        # Record the traversal direction: '!' forward, '^' reverse.
        if prod == '!':
            self.paths[-1].append('!')
        elif prod == '^':
            self.paths[-1].append('^')

    def pathtailFinish(self):
        self.pathcounter -= 1

    def pathFinish(self):
        self.pathcounter -= 1
        self.pathmode = 'path'
        if self.paths and (self.pathcounter < 1):
            path = self.paths.pop()
            if not path: pass
            elif len(path) == 1:
                # Trivial path: just a term.
                term = path.pop()
                if self.mode and self.mode[-1] == 'list':
                    self.lists[-1].append(term)
                else:
                    self.triples[-1].append(term)
            else:
                # A path traversal: emit a bnode chain for a!b!c / a^b.
                objt, path = path[0], path[1:]
                for (i, pred) in enumerate(path):
                    if (i % 2) != 0:
                        subj = objt
                        objt = BNode()
                        if path[i-1] == '!':
                            self.triple(subj, pred, objt)
                        elif path[i-1] == '^':
                            self.triple(objt, pred, subj)
                # @@ nested paths?
                if self.mode and self.mode[-1] == 'list':
                    self.lists[-1].append(objt)
                else:
                    self.triples[-1].append(objt)
        # if self.anonsubj is True:
        #    self.anonsubj = False
        # self.path = False

    def nodeToken(self, prod, tok):
        # Dispatch table keyed on the node-opening token; each closure
        # places the produced term into the current path/list/triple.
        nodedict = {}

        def ointerp(prod, tok):
            # '[': anonymous bnode, possibly opening a nested triple.
            b = BNode()
            # Record here if it's a subject node
            if self.anonsubj:
                self.anonsubj = False
            if ((not self.triples) or
                (False not in map(lambda s: not len(s), self.triples)) or
                (len(self.triples[-1]) == 3) or
                (len(self.triples) > 1 and
                 len(self.triples[-2]) == 3 and
                 not len(self.triples[-1]))):
                self.anonsubj = True
            if (self.paths and self.paths[-1] and
                self.paths[-1][-1] in '!^'):
                self.anonsubj = 'path'
            if self.paths:
                self.paths[-1].append(b)
                self.triples.append([b])
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(b)
                self.triples.append([b])
                # else: self.triples[-1].append(b)
            elif len(self.triples[-1]) > 1:
                self.triples.append([b])
            self.mode.append('triple')
        nodedict['['] = ointerp

        def cinterp(prod, tok):
            # ']': close the anonymous node's triple context.
            if ((not self.anonsubj) or
                (self.paths and len(self.paths[-1]) == 1)):
                self.triples.pop()
            elif self.anonsubj == 'path':
                self.triples.pop()
                self.triples.append([])
            else:
                self.anonsubj = False
            self.mode.pop()
        nodedict[']'] = cinterp

        def oparen(prod, tok):
            # '(': open an RDF collection.
            self.lists.append([])
            self.mode.append('list')
        nodedict['('] = oparen

        def cparen(prod, tok):
            # ')': close the collection, emitting rdf:first/rdf:rest cells.
            items = self.lists.pop()
            if items:
                first = head = BNode()
                for (i, item) in enumerate(items):
                    if i < len(items) - 1:
                        rest = BNode()
                    else:
                        rest = RDF.nil
                    self.triple(first, RDF.first, item)
                    self.triple(first, RDF.rest, rest)
                    first = rest
            else:
                head = RDF.nil
            self.mode.pop()
            if self.paths:
                self.paths[-1].append(head)
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(head)
            else:
                self.triples[-1].append(head)
        nodedict[')'] = cparen

        def obrace(prod, tok):
            # '{': open a quoted formula.
            f = self.formula()
            if self.paths:
                self.paths[-1].append(f)
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(f)
            else:
                self.triples[-1].append(f)
            self.formulae.append(f)
            self.labels.append('f' + str(self.counter))
        nodedict['{'] = obrace

        def cbrace(prod, tok):
            # '}': close the formula, flushing any completed triple.
            self.formulae.pop()
            self.labels.pop()
            if self.triples and (len(self.triples[-1]) == 3):
                self.triple(*self.triples[-1])
                self.triples[-1].pop()
        nodedict['}'] = cbrace

        def numericliteral(prod, tok):
            # Integers get xsd:integer; anything with a '.' gets xsd:double.
            if '.' in tok:
                tok = str(float(tok))
                lit = Literal(tok, datatype=XSD.double)
            else:
                tok = str(int(tok))
                lit = Literal(tok, datatype=XSD.integer)
            if self.paths:
                self.paths[-1].append(lit)
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(lit)
            else:
                self.triples[-1].append(lit)
        nodedict['numericliteral'] = numericliteral

        def variable(prod, tok):
            # '?name' keeps its own label (sic=True: no counter suffix).
            var = self.univar(tok[1:], sic=True)
            if self.paths:
                self.paths[-1].append(var)
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(var)
            else:
                self.triples[-1].append(var)
        nodedict['variable'] = variable

        def this(prod, tok):
            # '@this' denotes the current formula.
            formula = self.formulae[-1]
            if self.paths:
                self.paths[-1].append(formula)
            elif self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(formula)
            else:
                self.triples[-1].append(formula)
        nodedict['@this'] = this

        # Tokens with no handler (e.g. literals) are ignored here.
        try:
            nodedict[prod](prod, tok)
        except KeyError:
            pass

    def literalStart(self, prod):
        self.litinfo = {}

    def literalToken(self, prod, tok):
        if prod == 'string':
            self.litinfo['content'] = tok

    def dtlangToken(self, prod, tok):
        if prod == 'langcode':
            self.litinfo['language'] = tok

    def symbolToken(self, prod, tok):
        # Resolve the symbol, then route it: literal datatype, pending
        # quantifier list, open path/list, or the current triple.
        if prod == 'explicituri':
            term = self.uri(tok[1:-1])
        elif prod == 'qname':
            term = self.qname(tok)
        if self.litinfo:
            self.litinfo['datatype'] = term
        elif self.forAll is not False:
            self.forAll.append(term)
        elif self.forSome is not False:
            self.forSome.append(term)
        elif self.paths:
            self.paths[-1].append(term)
        elif self.mode and self.mode[-1] == 'list':
            self.lists[-1].append(term)
        else:
            self.triples[-1].append(term)

    def literalFinish(self):
        content = self.litinfo['content']
        language = self.litinfo.get('language')
        datatype = self.litinfo.get('datatype')
        lit = self.literal(content, language, datatype)
        if self.paths:
            self.paths[-1].append(lit)
        elif self.mode and self.mode[-1] == 'list':
            self.lists[-1].append(lit)
        else:
            self.triples[-1].append(lit)
        self.litinfo = False

    def objectFinish(self):
        # A completed s/p/o triple is emitted; the object slot is freed
        # for the next object in an objecttail.
        if self.triples and (len(self.triples[-1]) == 3):
            self.triple(*self.triples[-1])
            self.triples[-1].pop()

    def propertylisttailToken(self, prod, tok):
        # ';' keeps the subject but discards predicate/object.
        if prod == ';':
            self.triples[-1] = [self.triples[-1][0]]

    def verbToken(self, prod, tok):
        # Keyword verbs map to fixed predicates.
        vkwords = {'@a': RDF.type, '=': OWL.sameAs,
                   '=>': LOG.implies, '<=': LOG.implies}
        if vkwords.has_key(prod):
            term = vkwords[prod]
            # if self.paths:
            #    self.paths[-1].append(term)
            if self.mode and self.mode[-1] == 'list':
                self.lists[-1].append(term)
            else:
                self.triples[-1].append(term)
        if prod in ('@of', '<='): # @@ test <= in CWM
            # Wrap the predicate in a tuple to mark an inverted verb
            # ('is ... of' / '<='); triple() checks for this.
            verb = (self.triples[-1][1],)
            self.triples[-1][1] = verb

    def triple(self, subj, pred, objt):
        """Send a statement to the sink, honouring inverted predicates."""
        scp = self.formulae[-1]
        if not isinstance(pred, tuple):
            self.sink.statement(subj, pred, objt, scp)
        else:
            self.sink.statement(objt, pred[0], subj, scp)

    def qname(self, tok):
        """Resolve a qname token to a URIRef or BNode."""
        if ':' in tok:
            prefix, name = tok.split(':')
        elif self.userkeys:
            prefix, name = '', tok
        else:
            raise ParseError("Set user @keywords to use barenames.")
        # '_:' names are blank nodes (unless '_' was explicitly bound).
        if (prefix == '_') and (not self.bindings.has_key('_')):
            if name in self.bnodes:
                bnode = self.bnodes[name]
            else:
                bnode = BNode()
                self.bnodes[name] = bnode
            return bnode
        elif not self.bindings.has_key(prefix):
            # NOTE(review): only warns; the lookup below will then raise
            # KeyError for the unbound prefix.
            print >> sys.stderr, "Prefix not bound: %s" % prefix
        return self.uri(self.bindings[prefix] + name)
class NTriplesSink(object):
    """Sink that serialises parser events as N-Triples lines.

    Statements in the root formula are written directly; statements in a
    nested formula are reified through the N3R (swap/reify) vocabulary.
    """

    def __init__(self, out=None):
        # Default to stdout when no file-like object is supplied.
        self.out = out or sys.stdout
        self.counter = 0

    def start(self, root):
        # Remember the root formula so statement() can recognise it.
        self.root = root

    def statement(self, s, p, o, f):
        if f == self.root:
            self.out.write("%s %s %s .\n" % (s, p, o))
        else:
            self.flatten(s, p, o, f)

    def quantify(self, formula, var):
        # Quantifiers in the root formula are implicit and not emitted.
        if formula != self.root:
            if var.startswith('_'):
                pred = N3R.existential
            elif var.startswith('?'):
                pred = N3R.universal
            else:
                # Fix: previously `pred` was left unbound here, so an
                # unexpected variable form surfaced as a confusing
                # NameError on the write below. Fail explicitly instead.
                raise ValueError("Unrecognised quantified variable: %r" % (var,))
            self.out.write("%s %s %s .\n" % (formula, pred, var))

    def makeStatementID(self):
        return BNode()

    def flatten(self, s, p, o, f):
        # Reify a statement that lives in a non-root formula.
        fs = self.makeStatementID()
        self.out.write("%s %s %s .\n" % (f, N3R.statement, fs))
        self.out.write("%s %s %s .\n" % (fs, N3R.subject, s))
        self.out.write("%s %s %s .\n" % (fs, N3R.predicate, p))
        self.out.write("%s %s %s .\n" % (fs, N3R.object, o))

def parse(uri, options):
    """Parse `uri` as N3 and print N-Triples to stdout.

    options.baseURI overrides the base URI; options.root suppresses the
    reification/quantification output for non-root formulae.
    """
    baseURI = options.baseURI
    sink = NTriplesSink()
    if options.root:
        sink.quantify = lambda *args: True
        sink.flatten = lambda *args: True
    # Bare filenames are turned into file:// URIs.
    if ':' not in uri:
        uri = 'file://' + os.path.join(os.getcwd(), uri)
    if baseURI and (':' not in baseURI):
        baseURI = 'file://' + os.path.join(os.getcwd(), baseURI)
    p = N3Processor(uri, sink, baseURI=baseURI)
    p.parse()
from rdflib.syntax.parsers import Parser
from rdflib.Graph import ConjunctiveGraph
from xml.sax import make_parser
from xml.sax.saxutils import handler
from xml.sax.handler import ErrorHandler
from rdflib.syntax.parsers.TriXHandler import TriXHandler

def create_parser(store):
    """Build a namespace-aware SAX parser wired to a TriXHandler."""
    parser = make_parser()
    # Workaround for bug in expatreader.py. Needed when
    # expatreader is trying to guess a prefix.
    parser.start_namespace_decl("xml", "http://www.w3.org/XML/1998/namespace")
    parser.setFeature(handler.feature_namespaces, 1)
    trix = TriXHandler(store)
    parser.setContentHandler(trix)
    parser.setErrorHandler(ErrorHandler())
    return parser

class TriXParser(Parser):
    """A parser for TriX. See http://swdev.nokia.com/trix/TriX.html"""

    def __init__(self):
        pass

    def parse(self, source, sink, **args):
        # TriX carries named graphs, so the target store must be
        # context aware.
        assert sink.store.context_aware
        g = ConjunctiveGraph(store=sink.store)
        self._parser = create_parser(g)
        content_handler = self._parser.getContentHandler()
        preserve_bnode_ids = args.get("preserve_bnode_ids", None)
        if preserve_bnode_ids is not None:
            content_handler.preserve_bnode_ids = preserve_bnode_ids
        # We're only using it once now
        #content_handler.reset()
        #self._parser.reset()
        self._parser.parse(source)
# IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""SAX ContentHandler that loads TriX documents into an rdflib store."""

from rdflib import RDF, RDFS, Namespace
from rdflib import URIRef, BNode, Literal
from rdflib.Graph import Graph
from rdflib.exceptions import ParserError, Error
from rdflib.syntax.xml_names import is_ncname
from xml.sax.saxutils import handler, quoteattr, escape
from urlparse import urljoin, urldefrag

RDFNS = RDF.RDFNS
TRIXNS = Namespace("http://www.w3.org/2004/03/trix/trix-1/")

class TriXHandler(handler.ContentHandler):
    """An Sax Handler for TriX. See http://swdev.nokia.com/trix/TriX.html

    State machine values used throughout:
      0 = outside <TriX>, 1 = inside <TriX>, 2 = inside <graph>,
      3 = reading the graph's naming <uri>/<id>, 4 = inside <triple>.
    """

    def __init__(self, store):
        self.store = store
        self.preserve_bnode_ids = False
        self.reset()

    def reset(self):
        self.bnode = {}          # document bnode label -> BNode
        self.graph = self.store  # graph currently being filled
        self.triple = None       # [s, p, o] being accumulated, or None
        self.state = 0
        self.lang = None
        self.datatype = None

    # ContentHandler methods

    def setDocumentLocator(self, locator):
        self.locator = locator

    def startDocument(self):
        pass

    def startPrefixMapping(self, prefix, namespace):
        pass

    def endPrefixMapping(self, prefix):
        pass

    def startElementNS(self, name, qname, attrs):
        if name[0] != TRIXNS:
            self.error("Only elements in the TriX namespace are allowed.")
        if name[1] == "TriX":
            if self.state == 0:
                self.state = 1
            else:
                self.error("Unexpected TriX element")
        elif name[1] == "graph":
            if self.state == 1:
                self.state = 2
            else:
                self.error("Unexpected graph element")
        elif name[1] == "uri":
            if self.state == 2:
                # the context uri
                self.state = 3
            elif self.state == 4:
                # part of a triple
                pass
            else:
                self.error("Unexpected uri element")
        elif name[1] == "triple":
            if self.state == 2:
                # start of a triple
                self.triple = []
                self.state = 4
            else:
                self.error("Unexpected triple element")
        elif name[1] == "typedLiteral":
            if self.state == 4:
                # part of triple
                self.lang = None
                self.datatype = None
                try:
                    self.lang = attrs.getValueByQName("lang")
                except:
                    # language not required - ignore
                    pass
                try:
                    self.datatype = attrs.getValueByQName("datatype")
                except KeyError:
                    self.error("No required attribute 'datatype'")
            else:
                self.error("Unexpected typedLiteral element")
        elif name[1] == "plainLiteral":
            if self.state == 4:
                # part of triple
                self.lang = None
                self.datatype = None
                try:
                    self.lang = attrs.getValueByQName("lang")
                except:
                    # language not required - ignore
                    pass
            else:
                self.error("Unexpected plainLiteral element")
        elif name[1] == "id":
            if self.state == 2:
                # the context uri
                self.state = 3
            elif self.state == 4:
                # part of triple
                pass
            else:
                self.error("Unexpected id element")
        else:
            self.error("Unknown element %s in TriX namespace" % name[1])
        # Character data is collected per element.
        self.chars = ""

    def endElementNS(self, name, qname):
        if name[0] != TRIXNS:
            self.error("Only elements in the TriX namespace are allowed.")
        if name[1] == "uri":
            if self.state == 3:
                # URI naming the current graph.
                self.graph = Graph(store=self.store.store,
                                   identifier=URIRef(self.chars.strip()))
                self.state = 2
            elif self.state == 4:
                self.triple += [URIRef(self.chars.strip())]
            else:
                self.error("Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness")
        if name[1] == "id":
            if self.state == 3:
                # Blank-node identifier naming the current graph.
                self.graph = Graph(self.store.store,
                                   identifier=self.get_bnode(self.chars.strip()))
                self.state = 2
            elif self.state == 4:
                self.triple += [self.get_bnode(self.chars.strip())]
            else:
                self.error("Illegal internal self.state - This should never happen if the SAX parser ensures XML syntax correctness")
        if name[1] == "plainLiteral" or name[1] == "typedLiteral":
            if self.state == 4:
                self.triple += [Literal(self.chars, lang=self.lang, datatype=self.datatype)]
            else:
                self.error("This should never happen if the SAX parser ensures XML syntax correctness")
        if name[1] == "triple":
            if self.state == 4:
                if len(self.triple) != 3:
                    self.error("Triple has wrong length, got %d elements: %s" % (len(self.triple), self.triple))
                self.graph.add(self.triple)
                #self.store.store.add(self.triple,context=self.graph)
                #self.store.addN([self.triple+[self.graph]])
                self.state = 2
            else:
                self.error("This should never happen if the SAX parser ensures XML syntax correctness")
        if name[1] == "graph":
            self.state = 1
        if name[1] == "TriX":
            self.state = 0

    def get_bnode(self, label):
        """Map a document bnode label to a BNode, caching per document."""
        if self.preserve_bnode_ids:
            bn = BNode(label)
        else:
            # NOTE(review): the cached branch also passes `label` to
            # BNode; confirm whether a fresh anonymous id was intended.
            if label in self.bnode:
                bn = self.bnode[label]
            else:
                bn = BNode(label)
                self.bnode[label] = bn
        return bn

    def characters(self, content):
        self.chars += content

    def ignorableWhitespace(self, content):
        pass

    def processingInstruction(self, target, data):
        pass

    def error(self, message):
        """Raise a ParserError annotated with the SAX locator position."""
        locator = self.locator
        info = "%s:%s:%s: " % (locator.getSystemId(),
                               locator.getLineNumber(),
                               locator.getColumnNumber())
        raise ParserError(info + message)
# Compiled patterns for one pass over each input line.
r_line = re.compile(r'([^\r\n]*)(?:\r\n|\r|\n)')
r_wspace = re.compile(r'[ \t]*')
r_wspaces = re.compile(r'[ \t]+')
r_tail = re.compile(r'[ \t]*\.[ \t]*')
r_uriref = re.compile(uriref)
r_nodeid = re.compile(r'_:([A-Za-z][A-Za-z0-9]*)')
r_literal = re.compile(literal + litinfo)

bufsiz = 2048      # chunk size for buffered reads in readline()
validate = False   # when False, the strict unquote/uriquote below are
                   # replaced by fast permissive versions

class Node(unicode): pass

# class URI(Node): pass
# class bNode(Node): pass
# class Literal(Node):
#    def __new__(cls, lit, lang=None, dtype=None):
#       n = str(lang) + ' ' + str(dtype) + ' ' + lit
#       return unicode.__new__(cls, n)

from rdflib import URIRef as URI
from rdflib import BNode as bNode
from rdflib import Literal

class Sink(object):
    """Default triple sink: counts and prints each parsed triple."""

    def __init__(self):
        self.length = 0

    def triple(self, s, p, o):
        self.length += 1
        print (s, p, o)

class ParseError(Exception): pass

# N-Triples backslash escapes -> the characters they denote.
quot = {'t': '\t', 'n': '\n', 'r': '\r', '"': '"', '\\': '\\'}
r_safe = re.compile(r'([\x20\x21\x23-\x5B\x5D-\x7E]+)')
r_quot = re.compile(r'\\(t|n|r|"|\\)')
r_uniquot = re.compile(r'\\u([0-9A-F]{4})|\\U([0-9A-F]{8})')

def unquote(s):
    """Unquote an N-Triples string."""
    result = []
    while s:
        # run of plain ASCII characters - copy through verbatim
        m = r_safe.match(s)
        if m:
            s = s[m.end():]
            result.append(m.group(1))
            continue

        # single-character escape (\t \n \r \" \\)
        m = r_quot.match(s)
        if m:
            s = s[2:]
            result.append(quot[m.group(1)])
            continue

        # \uXXXX / \UXXXXXXXX escapes
        m = r_uniquot.match(s)
        if m:
            s = s[m.end():]
            u, U = m.groups()
            codepoint = int(u or U, 16)
            if codepoint > 0x10FFFF:
                raise ParseError("Disallowed codepoint: %08X" % codepoint)
            result.append(unichr(codepoint))
        elif s.startswith('\\'):
            raise ParseError("Illegal escape at: %s..." % s[:10])
        else:
            raise ParseError("Illegal literal character: %r" % s[0])
    return unicode(''.join(result))

if not validate:
    # Fast path: let Python's codec do all the unescaping, no validation.
    def unquote(s):
        return s.decode('unicode-escape')

r_hibyte = re.compile(r'([\x80-\xFF])')

def uriquote(uri):
    """Percent-encode high bytes in a URI (validating mode only)."""
    return r_hibyte.sub(lambda m: '%%%02X' % ord(m.group(1)), uri)
if not validate:
    def uriquote(uri):
        return uri

class NTriplesParser(object):
    """An N-Triples Parser.
    Usage:
          p = NTriplesParser(sink=MySink())
          sink = p.parse(f) # file; use parsestring for a string
    """

    def __init__(self, sink=None):
        # sink: object with a triple(s, p, o) method; defaults to the
        # printing Sink above.
        if sink is not None:
            self.sink = sink
        else:
            self.sink = Sink()

    def parse(self, f):
        """Parse f as an N-Triples file.  Returns the sink."""
        if not hasattr(f, 'read'):
            raise ParseError("Item to parse must be a file-like object.")

        self.file = f
        self.buffer = ''
        while True:
            self.line = self.readline()
            if self.line is None: break
            try:
                self.parseline()
            except ParseError:
                raise ParseError("Invalid line: %r" % self.line)
        return self.sink

    def parsestring(self, s):
        """Parse s as an N-Triples string."""
        if not isinstance(s, basestring):
            raise ParseError("Item to parse must be a string instance.")
        from cStringIO import StringIO
        f = StringIO()
        f.write(s)
        f.seek(0)
        # NOTE(review): unlike parse(), the sink is not returned here.
        self.parse(f)

    def readline(self):
        """Read an N-Triples line from buffered input."""
        # N-Triples lines end in either CRLF, CR, or LF
        # Therefore, we can't just use f.readline()
        if not self.buffer:
            buffer = self.file.read(bufsiz)
            if not buffer: return None
            self.buffer = buffer

        while True:
            m = r_line.match(self.buffer)
            if m: # the more likely prospect
                self.buffer = self.buffer[m.end():]
                return m.group(1)
            else:
                # no full line in the buffer yet - pull in another chunk
                buffer = self.file.read(bufsiz)
                if not buffer:
                    raise ParseError("EOF in line")
                self.buffer += buffer

    def parseline(self):
        """Parse one line: subject predicate object followed by a dot."""
        self.eat(r_wspace)
        if (not self.line) or self.line.startswith('#'):
            return # The line is empty or a comment

        subject = self.subject()
        self.eat(r_wspaces)

        predicate = self.predicate()
        self.eat(r_wspaces)

        object = self.object()
        self.eat(r_tail)

        if self.line:
            raise ParseError("Trailing garbage")
        self.sink.triple(subject, predicate, object)

    def peek(self, token):
        return self.line.startswith(token)

    def eat(self, pattern):
        """Consume a match of pattern from the front of self.line."""
        m = pattern.match(self.line)
        if not m: # @@ Why can't we get the original pattern?
            raise ParseError("Failed to eat %s" % pattern)
        self.line = self.line[m.end():]
        return m

    def subject(self):
        # @@ Consider using dictionary cases
        subj = self.uriref() or self.nodeid()
        if not subj:
            raise ParseError("Subject must be uriref or nodeID")
        return subj

    def predicate(self):
        pred = self.uriref()
        if not pred:
            raise ParseError("Predicate must be uriref")
        return pred

    def object(self):
        objt = self.uriref() or self.nodeid() or self.literal()
        if objt is False:
            raise ParseError("Unrecognised object type")
        return objt

    def uriref(self):
        """Try to read a <uri>; returns False when the line starts otherwise."""
        if self.peek('<'):
            uri = self.eat(r_uriref).group(1)
            uri = unquote(uri)
            uri = uriquote(uri)
            return URI(uri)
        return False

    def nodeid(self):
        """Try to read a _:label blank node; False if not present."""
        if self.peek('_'):
            return bNode(self.eat(r_nodeid).group(1))
        return False

    def literal(self):
        """Try to read a "literal" with optional @lang / ^^datatype."""
        if self.peek('"'):
            lit, lang, dtype = self.eat(r_literal).groups()
            lang = lang or None
            dtype = dtype or None
            if lang and dtype:
                raise ParseError("Can't have both a language and a datatype")
            lit = unquote(lit)
            return Literal(lit, lang, dtype)
        return False

def parseURI(uri):
    """Fetch uri and parse it as N-Triples, printing a triple count."""
    import urllib
    parser = NTriplesParser()
    u = urllib.urlopen(uri)
    sink = parser.parse(u)
    u.close()
    # for triple in sink:
    #    print triple
    print 'Length of input:', sink.length

def main():
    import sys
    if len(sys.argv) == 2:
        parseURI(sys.argv[1])
    else:
        print __doc__

if __name__=="__main__":
    main()
rdflib-2.4.2/rdflib/syntax/parsers/RDFXMLHandler.py0000644000175000017500000004700311153616030021036 0ustar nachonacho# Copyright (c) 2002, Daniel Krech, http://eikeon.com/
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#   * Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
# # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # * Neither the name of Daniel Krech nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ """ from rdflib import RDF, RDFS from rdflib import URIRef, BNode, Literal from rdflib.exceptions import ParserError, Error from rdflib.syntax.xml_names import is_ncname from xml.sax.saxutils import handler, quoteattr, escape from urlparse import urljoin, urldefrag RDFNS = RDF.RDFNS # http://www.w3.org/TR/rdf-syntax-grammar/#eventterm-attribute-URI # A mapping from unqualified terms to there qualified version. 
UNQUALIFIED = {"about" : RDF.about, "ID" : RDF.ID, "type" : RDF.type, "resource": RDF.resource, "parseType": RDF.parseType} # http://www.w3.org/TR/rdf-syntax-grammar/#coreSyntaxTerms CORE_SYNTAX_TERMS = [RDF.RDF, RDF.ID, RDF.about, RDF.parseType, RDF.resource, RDF.nodeID, RDF.datatype] # http://www.w3.org/TR/rdf-syntax-grammar/#syntaxTerms SYNTAX_TERMS = CORE_SYNTAX_TERMS + [RDF.Description, RDF.li] # http://www.w3.org/TR/rdf-syntax-grammar/#oldTerms OLD_TERMS = [RDFNS["aboutEach"], RDFNS["aboutEachPrefix"], RDFNS["bagID"]] NODE_ELEMENT_EXCEPTIONS = CORE_SYNTAX_TERMS + [RDF.li,] + OLD_TERMS NODE_ELEMENT_ATTRIBUTES = [RDF.ID, RDF.nodeID, RDF.about] PROPERTY_ELEMENT_EXCEPTIONS = CORE_SYNTAX_TERMS + [RDF.Description,] + OLD_TERMS PROPERTY_ATTRIBUTE_EXCEPTIONS = CORE_SYNTAX_TERMS + [RDF.Description, RDF.li] + OLD_TERMS PROPERTY_ELEMENT_ATTRIBUTES = [RDF.ID, RDF.resource, RDF.nodeID] XMLNS = "http://www.w3.org/XML/1998/namespace" BASE = (XMLNS, "base") LANG = (XMLNS, "lang") class BagID(URIRef): __slots__ = ['li'] def __init__(self, val): super(URIRef, self).__init__(val) self.li = 0 def next_li(self): self.li += 1 return URIRef(RDFNS + "_%s" % self.li) class ElementHandler(object): __slots__ = ['start', 'char', 'end', 'li', 'id', 'base', 'subject', 'predicate', 'object', 'list', 'language', 'datatype', 'declared', 'data'] def __init__(self): self.start = None self.char = None self.end = None self.li = 0 self.id = None self.base = None self.subject = None self.object = None self.list = None self.language = None self.datatype = None self.declared = None self.data = None def next_li(self): self.li += 1 return URIRef(RDFNS + "_%s" % self.li) class RDFXMLHandler(handler.ContentHandler): def __init__(self, store): self.store = store self.preserve_bnode_ids = False self.reset() def reset(self): document_element = ElementHandler() document_element.start = self.document_element_start document_element.end = lambda name, qname: None self.stack = [None, document_element,] self.ids 
= {} # remember IDs we have already seen self.bnode = {} self._ns_contexts = [{}] # contains uri -> prefix dicts self._current_context = self._ns_contexts[-1] # ContentHandler methods def setDocumentLocator(self, locator): self.locator = locator def startDocument(self): pass def startPrefixMapping(self, prefix, namespace): self._ns_contexts.append(self._current_context.copy()) self._current_context[namespace] = prefix self.store.bind(prefix, URIRef(namespace), override=False) def endPrefixMapping(self, prefix): self._current_context = self._ns_contexts[-1] del self._ns_contexts[-1] def startElementNS(self, name, qname, attrs): stack = self.stack stack.append(ElementHandler()) current = self.current parent = self.parent base = attrs.get(BASE, None) if base is not None: base, frag = urldefrag(base) if parent and parent.base: base = urljoin(parent.base, base) else: systemId = self.locator.getPublicId() or self.locator.getSystemId() if systemId: base = urljoin(systemId, base) else: if parent: base = parent.base if base is None: systemId = self.locator.getPublicId() or self.locator.getSystemId() if systemId: base, frag = urldefrag(systemId) current.base = base language = attrs.get(LANG, None) if language is None: if parent: language = parent.language current.language = language current.start(name, qname, attrs) def endElementNS(self, name, qname): self.current.end(name, qname) self.stack.pop() def characters(self, content): char = self.current.char if char: char(content) def ignorableWhitespace(self, content): pass def processingInstruction(self, target, data): pass def add_reified(self, sid, (s, p, o)): self.store.add((sid, RDF.type, RDF.Statement)) self.store.add((sid, RDF.subject, s)) self.store.add((sid, RDF.predicate, p)) self.store.add((sid, RDF.object, o)) def error(self, message): locator = self.locator info = "%s:%s:%s: " % (locator.getSystemId(), locator.getLineNumber(), locator.getColumnNumber()) raise ParserError(info + message) def get_current(self): return 
self.stack[-2] # Create a read only property called current so that self.current # give the current element handler. current = property(get_current) def get_next(self): return self.stack[-1] # Create a read only property that gives the element handler to be # used for the next element. next = property(get_next) def get_parent(self): return self.stack[-3] # Create a read only property that gives the current parent # element handler parent = property(get_parent) def absolutize(self, uri): result = urljoin(self.current.base, uri, allow_fragments=1) if uri and uri[-1]=="#" and result[-1]!="#": result = "%s#" % result return URIRef(result) def convert(self, name, qname, attrs): if name[0] is None: name = URIRef(name[1]) else: name = URIRef("".join(name)) atts = {} for (n, v) in attrs.items(): #attrs._attrs.iteritems(): # if n[0] is None: att = URIRef(n[1]) else: att = URIRef("".join(n)) if att.startswith(XMLNS) or att[0:3].lower()=="xml": pass elif att in UNQUALIFIED: #if not RDFNS[att] in atts: atts[RDFNS[att]] = v else: atts[URIRef(att)] = v return name, atts def document_element_start(self, name, qname, attrs): if name[0] and URIRef("".join(name)) == RDF.RDF: next = self.next next.start = self.node_element_start next.end = self.node_element_end else: self.node_element_start(name, qname, attrs) #self.current.end = self.node_element_end # TODO... 
set end to something that sets start such that # another element will cause error def node_element_start(self, name, qname, attrs): name, atts = self.convert(name, qname, attrs) current = self.current absolutize = self.absolutize next = self.next next.start = self.property_element_start next.end = self.property_element_end if name in NODE_ELEMENT_EXCEPTIONS: self.error("Invalid node element URI: %s" % name) if RDF.ID in atts: if RDF.about in atts or RDF.nodeID in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") id = atts[RDF.ID] if not is_ncname(id): self.error("rdf:ID value is not a valid NCName: %s" % id) subject = absolutize("#%s" % id) if subject in self.ids: self.error("two elements cannot use the same ID: '%s'" % subject) self.ids[subject] = 1 # IDs can only appear once within a document elif RDF.nodeID in atts: if RDF.ID in atts or RDF.about in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") nodeID = atts[RDF.nodeID] if not is_ncname(nodeID): self.error("rdf:nodeID value is not a valid NCName: %s" % nodeID) if self.preserve_bnode_ids is False: if nodeID in self.bnode: subject = self.bnode[nodeID] else: subject = BNode() self.bnode[nodeID] = subject else: subject = BNode(nodeID) elif RDF.about in atts: if RDF.ID in atts or RDF.nodeID in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") subject = absolutize(atts[RDF.about]) else: subject = BNode() if name!=RDF.Description: # S1 self.store.add((subject, RDF.type, absolutize(name))) language = current.language for att in atts: if not att.startswith(RDFNS): predicate = absolutize(att) try: object = Literal(atts[att], language) except Error, e: self.error(e.msg) elif att==RDF.type: #S2 predicate = RDF.type object = absolutize(atts[RDF.type]) elif att in NODE_ELEMENT_ATTRIBUTES: continue elif att in PROPERTY_ATTRIBUTE_EXCEPTIONS: #S3 self.error("Invalid property attribute URI: %s" % att) continue # for when error does not throw 
an exception else: predicate = absolutize(att) try: object = Literal(atts[att], language) except Error, e: self.error(e.msg) self.store.add((subject, predicate, object)) current.subject = subject def node_element_end(self, name, qname): self.parent.object = self.current.subject def property_element_start(self, name, qname, attrs): name, atts = self.convert(name, qname, attrs) current = self.current absolutize = self.absolutize next = self.next object = None current.data = None current.list = None if not name.startswith(RDFNS): current.predicate = absolutize(name) elif name==RDF.li: current.predicate = current.next_li() elif name in PROPERTY_ELEMENT_EXCEPTIONS: self.error("Invalid property element URI: %s" % name) else: current.predicate = absolutize(name) id = atts.get(RDF.ID, None) if id is not None: if not is_ncname(id): self.error("rdf:ID value is not a value NCName: %s" % id) current.id = absolutize("#%s" % id) else: current.id = None resource = atts.get(RDF.resource, None) nodeID = atts.get(RDF.nodeID, None) parse_type = atts.get(RDF.parseType, None) if resource is not None and nodeID is not None: self.error("Property element cannot have both rdf:nodeID and rdf:resource") if resource is not None: object = absolutize(resource) next.start = self.node_element_start next.end = self.node_element_end elif nodeID is not None: if not is_ncname(nodeID): self.error("rdf:nodeID value is not a valid NCName: %s" % nodeID) if self.preserve_bnode_ids is False: if nodeID in self.bnode: object = self.bnode[nodeID] else: subject = BNode() self.bnode[nodeID] = subject object = subject else: object = subject = BNode(nodeID) next.start = self.node_element_start next.end = self.node_element_end else: if parse_type is not None: for att in atts: if att!=RDF.parseType and att!=RDF.ID: self.error("Property attr '%s' now allowed here" % att) if parse_type=="Resource": current.subject = object = BNode() current.char = self.property_element_char next.start = self.property_element_start 
next.end = self.property_element_end elif parse_type=="Collection": current.char = None object = current.list = RDF.nil #BNode()#self.parent.subject next.start = self.node_element_start next.end = self.list_node_element_end else: #if parse_type=="Literal": # All other values are treated as Literal # See: http://www.w3.org/TR/rdf-syntax-grammar/#parseTypeOtherPropertyElt object = Literal("", None, RDF.XMLLiteral) current.char = self.literal_element_char current.declared = {} next.start = self.literal_element_start next.char = self.literal_element_char next.end = self.literal_element_end current.object = object return else: object = None current.char = self.property_element_char next.start = self.node_element_start next.end = self.node_element_end datatype = current.datatype = atts.get(RDF.datatype, None) language = current.language if datatype is not None: # TODO: check that there are no atts other than datatype and id datatype = absolutize(datatype) else: for att in atts: if not att.startswith(RDFNS): predicate = absolutize(att) elif att in PROPERTY_ELEMENT_ATTRIBUTES: continue elif att in PROPERTY_ATTRIBUTE_EXCEPTIONS: self.error("""Invalid property attribute URI: %s""" % att) else: predicate = absolutize(att) if att==RDF.type: o = URIRef(atts[att]) else: o = Literal(atts[att], language, datatype) if object is None: object = BNode() self.store.add((object, predicate, o)) if object is None: current.data = "" current.object = None else: current.data = None current.object = object def property_element_char(self, data): current = self.current if current.data is not None: current.data += data def property_element_end(self, name, qname): current = self.current if current.data is not None and current.object is None: current.object = Literal(current.data, current.language, current.datatype) current.data = None if self.next.end==self.list_node_element_end: if current.object!=RDF.nil: self.store.add((current.list, RDF.rest, RDF.nil)) if current.object is not None: 
self.store.add((self.parent.subject, current.predicate, current.object)) if current.id is not None: self.add_reified(current.id, (self.parent.subject, current.predicate, current.object)) current.subject = None def list_node_element_end(self, name, qname): current = self.current if self.parent.list==RDF.nil: list = BNode() # Removed between 20030123 and 20030905 #self.store.add((list, RDF.type, LIST)) self.parent.list = list self.store.add((self.parent.list, RDF.first, current.subject)) self.parent.object = list self.parent.char = None else: list = BNode() # Removed between 20030123 and 20030905 #self.store.add((list, RDF.type, LIST)) self.store.add((self.parent.list, RDF.rest, list)) self.store.add((list, RDF.first, current.subject)) self.parent.list = list def literal_element_start(self, name, qname, attrs): current = self.current self.next.start = self.literal_element_start self.next.char = self.literal_element_char self.next.end = self.literal_element_end current.declared = self.parent.declared.copy() if name[0]: prefix = self._current_context[name[0]] if prefix: current.object = "<%s:%s" % (prefix, name[1]) else: current.object = "<%s" % name[1] if not name[0] in current.declared: current.declared[name[0]] = prefix if prefix: current.object += (' xmlns:%s="%s"' % (prefix, name[0])) else: current.object += (' xmlns="%s"' % name[0]) else: current.object = "<%s" % name[1] for (name, value) in attrs.items(): if name[0]: if not name[0] in current.declared: current.declared[name[0]] = self._current_context[name[0]] name = current.declared[name[0]] + ":" + name[1] else: name = name[1] current.object += (' %s=%s' % (name, quoteattr(value))) current.object += ">" def literal_element_char(self, data): self.current.object += escape(data) def literal_element_end(self, name, qname): if name[0]: prefix = self._current_context[name[0]] if prefix: end = u"" % (prefix, name[1]) else: end = u"" % name[1] else: end = u"" % name[1] self.parent.object += self.current.object + end 
rdflib-2.4.2/rdflib/syntax/parsers/N3Parser.py0000644000175000017500000000312411153616030020175 0ustar nachonachofrom rdflib import URIRef, BNode, Literal, RDF, Variable from rdflib.util import from_n3 from rdflib.syntax.parsers import Parser from rdflib.syntax.parsers.n3p.n3proc import N3Processor from rdflib.Graph import Graph, QuotedGraph, ConjunctiveGraph class N3Parser(Parser): def __init__(self): pass def parse(self, source, graph): # we're currently being handed a Graph, not a ConjunctiveGraph assert graph.store.context_aware # is this implied by formula_aware assert graph.store.formula_aware conj_graph = ConjunctiveGraph(store=graph.store) conj_graph.default_context = graph # TODO: CG __init__ should have a default_context arg # TODO: update N3Processor so that it can use conj_graph as the sink sink = Sink(conj_graph) if False: sink.quantify = lambda *args: True sink.flatten = lambda *args: True baseURI = graph.absolutize(source.getPublicId() or source.getSystemId() or "") p = N3Processor("nowhere", sink, baseURI=baseURI) # pass in "nowhere" so we can set data instead p.userkeys = True # bah p.data = source.getByteStream().read() # TODO getCharacterStream? p.parse() for prefix, namespace in p.bindings.items(): conj_graph.bind(prefix, namespace) class Sink(object): def __init__(self, graph): self.graph = graph def start(self, root): pass def statement(self, s, p, o, f): f.add((s, p, o)) def quantify(self, formula, var): #print "quantify(%s, %s)" % (formula, var) pass rdflib-2.4.2/rdflib/syntax/parsers/RDFaParser.py0000644000175000017500000002306711153616030020501 0ustar nachonacho""" RDFa parser. RDFa is a set of attributes used to embed RDF in XHTML. An important goal of RDFa is to achieve this RDF embedding without repeating existing XHTML content when that content is the metadata. 
REFERENCES: http://www.w3.org/2001/sw/BestPractices/HTML/2005-rdfa-syntax LICENSE: BSD CHANGE HISTORY: 2006/06/03 - Initial Version 2006/06/08 - Added support for role (as per primer not syntax spec) Added support for plaintext and flattening of XMLLiterals ... (Sections 5.1.1.2 and 5.1.2.1) Fixed plaintext bug where it was being resolved as CURIE Added support to skip reserved @rel keywords from: http://www.w3.org/TR/REC-html40/types.html#h-6.12 2006/08/12 - Changed reserved @rel resolution to include a '#' Fixed subject resolution for LINK/META when inside HEAD Fixed blank node extraction [_:address] -> [_:_:address] Added support for passing prefix mappings to the Graph via RDFaSink Added @id support as part of subject resolution Copyright (c) 2006, Elias Torres """ import sys, re, urllib, urlparse, cStringIO, string from xml.dom import pulldom from rdflib.syntax.parsers import Parser from rdflib.Graph import ConjunctiveGraph from rdflib.URIRef import URIRef from rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.Namespace import Namespace __version__ = "$Id: RDFaParser.py 1185 2007-07-12 16:46:36Z whit $" rdfa_attribs = ["about","property","rel","rev","href","content","role","id"] reserved_links = ['alternate', 'stylesheet', 'start', 'next', 'prev', 'contents', 'index', 'glossary', 'copyright', 'chapter', 'section', 'subsection', 'appendix', 'help', 'bookmark'] xhtml = Namespace("http://www.w3.org/1999/xhtml") xml = Namespace("http://www.w3.org/XML/1998/namespace") rdf = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") class RDFaSink(object): def __init__(self, graph): self.graph = graph def __str__(self): return self.graph.serialize(format="pretty-xml") def triple(self, s, p, o): self.graph.add((s, p, o)) def prefix(self, prefix, ns): self.graph.bind(prefix, ns, override=False) _urifixer = re.compile('^([A-Za-z][A-Za-z0-9+-.]*://)(/*)(.*?)') def _urljoin(base, uri): uri = _urifixer.sub(r'\1\3', uri) return urlparse.urljoin(base, 
uri) class RDFaParser(Parser): def __init__(self): self.lang = None self.abouts = [] self.xmlbases = [] self.langs = [] self.elementStack = [None] self.bcounter = {} self.bnodes = {} self.sink = None def parse(self, source, sink, baseURI=None): self.sink = RDFaSink(sink) self.triple = self.sink.triple self.prefix = self.sink.prefix self.baseuri = baseURI or source.getPublicId() f = source.getByteStream() events = pulldom.parse(f) self.handler = events.pulldom for (event, node) in events: if event == pulldom.START_DOCUMENT: self.abouts += [(URIRef(""), node)] if event == pulldom.END_DOCUMENT: assert len(self.elementStack) == 0 if event == pulldom.START_ELEMENT: # keep track of parent node self.elementStack += [node] #if __debug__: print [e.tagName for e in self.elementStack if e] found = filter(lambda x:x in node.attributes.keys(),rdfa_attribs) # keep track of xml:lang xml:base baseuri = node.getAttributeNS(xml,"base") or node.getAttribute("xml:base") or self.baseuri self.baseuri = _urljoin(self.baseuri, baseuri) self.xmlbases.append(self.baseuri) if node.hasAttributeNS(xml,"lang") or node.hasAttribute("xml:lang"): lang = node.getAttributeNS(xml, 'lang') or node.getAttribute('xml:lang') if lang == '': # xml:lang could be explicitly set to '', we need to capture that lang = None else: # if no xml:lang is specified, use parent lang lang = self.lang self.lang = lang self.langs.append(lang) # node is not an RDFa element. 
if len(found) == 0: continue parentNode = self.elementStack[-2] if "about" in found: self.abouts += [(self.extractCURIEorURI(node.getAttribute("about")),node)] elif "id" in found: self.abouts += [(self.extractCURIEorURI("#" + node.getAttribute("id")),node)] subject = self.abouts[-1][0] # meta/link subject processing if(node.tagName == "meta" or node.tagName == "link"): if not("about" in found) and parentNode: if parentNode and parentNode.tagName == "head": subject = URIRef("") elif(parentNode.hasAttribute("about")): subject = self.extractCURIEorURI(parentNode.getAttribute("about")) elif parentNode.hasAttributeNS(xml,"id") or parentNode.hasAttribute("id"): # TODO: is this the right way to process xml:id by adding a '#' id = parentNode.getAttributeNS(xml,"id") or parentNode.getAttribute("id") subject = self.extractCURIEorURI("#" + id) else: subject = self.generateBlankNode(parentNode) if 'property' in found: predicate = self.extractCURIEorURI(node.getAttribute('property')) literal = None datatype = None plaintext = False if node.hasAttribute('datatype'): sdt = node.getAttribute('datatype') if sdt <> 'plaintext': datatype = self.extractCURIEorURI(sdt) else: plaintext = True if node.hasAttribute("content"): literal = Literal(node.getAttribute("content"), lang=lang, datatype=datatype) else: events.expandNode(node) # because I expanded, I won't get an END_ELEMENT self._popStacks(event, node) content = "" for child in node.childNodes: if datatype or plaintext: content += self._getNodeText(child) else: content += child.toxml() content = content.strip() literal = Literal(content,datatype=datatype or rdf.XMLLiteral) if literal: self.triple(subject, predicate, literal) if "rel" in found: rel = node.getAttribute("rel").strip() if string.lower(rel) in reserved_links: rel = xhtml["#" + string.lower(rel)] predicate = self.extractCURIEorURI(rel) if node.hasAttribute("href"): object = self.extractCURIEorURI(node.getAttribute("href")) self.triple(subject, predicate, object) if "rev" 
in found: predicate = self.extractCURIEorURI(node.getAttribute("rev")) if node.hasAttribute("href"): object = self.extractCURIEorURI(node.getAttribute("href")) self.triple(object, predicate, subject) # role is in the primer, but not in the syntax. # could be deprecated. # Assumptions: # - Subject resolution as always (including meta/link) # - Attribute Value is a CURIE or URI # - It adds another triple, besides prop, rel, rev. if "role" in found: type = self.extractCURIEorURI(node.getAttribute('role')) self.triple(subject, rdf.type, type) if event == pulldom.END_ELEMENT: self._popStacks(event, node) # share with sink any prefix mappings for nsc in self.handler._ns_contexts: for ns, prefix in nsc.items(): self.prefix(prefix, ns) f.close() def _getNodeText(self, node): if node.nodeType in (3,4): return node.nodeValue text = '' for child in node.childNodes: if child.nodeType in (3,4): text = text + child.nodeValue return text def generateBlankNode(self, parentNode): name = parentNode.tagName if self.bnodes.has_key(parentNode): return self.bnodes[parentNode] if self.bcounter.has_key(name): self.bcounter[name] = self.bcounter[name] + 1 else: self.bcounter[name] = 0 self.bnodes[parentNode] = BNode("%s%d" % (name, self.bcounter[name])) return self.bnodes[parentNode] def extractCURIEorURI(self, resource): if(len(resource) > 0 and resource[0] == "[" and resource[-1] == "]"): resource = resource[1:-1] # resolve prefixes # TODO: check whether I need to reverse the ns_contexts if(resource.find(":") > -1): rpre,rsuf = resource.split(":", 1) for nsc in self.handler._ns_contexts: for ns, prefix in nsc.items(): if prefix == rpre: resource = ns + rsuf # TODO: is this enough to check for bnodes? 
if(len(resource) > 0 and resource[0:2] == "_:"): return BNode(resource[2:]) return URIRef(self.resolveURI(resource)) def resolveURI(self, uri): return _urljoin(self.baseuri or '', uri) def _popStacks(self, event, node): # check abouts if len(self.abouts) <> 0: about, aboutnode = self.abouts[-1] if aboutnode == node: self.abouts.pop() # keep track of nodes going out of scope self.elementStack.pop() # track xml:base and xml:lang going out of scope if self.xmlbases: self.xmlbases.pop() if self.xmlbases and self.xmlbases[-1]: self.baseuri = self.xmlbases[-1] if self.langs: self.langs.pop() if self.langs and self.langs[-1]: self.lang = self.langs[-1] if __name__ == "__main__": store = ConjunctiveGraph() store.load(sys.argv[1], format="rdfa") print store.serialize(format="pretty-xml") rdflib-2.4.2/rdflib/syntax/parsers/__init__.py0000644000175000017500000000016311153616030020277 0ustar nachonacho class Parser(object): def __init__(self): pass def parse(self, source, sink): pass rdflib-2.4.2/rdflib/syntax/parsers/RDFXMLParser.py0000644000175000017500000000240611153616030020713 0ustar nachonachofrom rdflib.syntax.parsers import Parser from xml.sax import make_parser from xml.sax.saxutils import handler from xml.sax.handler import ErrorHandler from rdflib.syntax.parsers.RDFXMLHandler import RDFXMLHandler def create_parser(target, store): parser = make_parser() # Workaround for bug in expatreader.py. Needed when # expatreader is trying to guess a prefix. 
parser.start_namespace_decl("xml", "http://www.w3.org/XML/1998/namespace") parser.setFeature(handler.feature_namespaces, 1) rdfxml = RDFXMLHandler(store) rdfxml.setDocumentLocator(target) #rdfxml.setDocumentLocator(_Locator(self.url, self.parser)) parser.setContentHandler(rdfxml) parser.setErrorHandler(ErrorHandler()) return parser class RDFXMLParser(Parser): def __init__(self): pass def parse(self, source, sink, **args): self._parser = create_parser(source, sink) content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get("preserve_bnode_ids", None) if preserve_bnode_ids is not None: content_handler.preserve_bnode_ids = preserve_bnode_ids # We're only using it once now #content_handler.reset() #self._parser.reset() self._parser.parse(source) rdflib-2.4.2/rdflib/syntax/parsers/NTParser.py0000644000175000017500000000105411153616030020236 0ustar nachonachofrom rdflib.syntax.parsers import Parser from rdflib.syntax.parsers.ntriples import NTriplesParser class NTSink(object): def __init__(self, graph): self.graph = graph def triple(self, s, p, o): self.graph.add((s, p, o)) import codecs class NTParser(Parser): def __init__(self): super(NTParser, self).__init__() def parse(self, source, sink, baseURI=None): f = source.getByteStream() # TODO getCharacterStream? 
parser = NTriplesParser(NTSink(sink)) parser.parse(f) f.close() rdflib-2.4.2/rdflib/syntax/__init__.py0000644000175000017500000000001611153616030016615 0ustar nachonacho# RDF Library rdflib-2.4.2/rdflib/syntax/serializers/0000755000175000017500000000000011204354476017055 5ustar nachonachordflib-2.4.2/rdflib/syntax/serializers/RecursiveSerializer.py0000644000175000017500000000766511153616030023434 0ustar nachonachofrom rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.URIRef import URIRef from rdflib.syntax.serializers.AbstractSerializer import AbstractSerializer from rdflib import RDF, RDFS class RecursiveSerializer(AbstractSerializer): topClasses = [RDFS.Class] predicateOrder = [RDF.type, RDFS.label] maxDepth = 10 indentString = u" " def __init__(self, store): super(RecursiveSerializer, self).__init__(store) self.stream = None self.reset() def addNamespace(self, prefix, uri): self.namespaces[prefix] = uri def checkSubject(self, subject): """Check to see if the subject should be serialized yet""" if ((self.isDone(subject)) or (subject not in self._subjects) or ((subject in self._topLevels) and (self.depth > 1)) or (isinstance(subject, URIRef) and (self.depth >= self.maxDepth)) ): return False return True def isDone(self, subject): """Return true if subject is serialized""" return subject in self._serialized def orderSubjects(self): seen = {} subjects = [] for classURI in self.topClasses: members = list(self.store.subjects(RDF.type, classURI)) members.sort() for member in members: subjects.append(member) self._topLevels[member] = True seen[member] = True recursable = [(isinstance(subject,BNode), self.refCount(subject), subject) for subject in self._subjects if subject not in seen] recursable.sort() subjects.extend([subject for (isbnode, refs, subject) in recursable]) return subjects def preprocess(self): for triple in self.store.triples((None,None,None)): self.preprocessTriple(triple) def preprocessTriple(self, (s,p,o)): references = 
self.refCount(o) + 1 self._references[o] = references self._subjects[s] = True def refCount(self, node): """Return the number of times this node has been referenced in the object position""" return self._references.get(node, 0) def reset(self): self.depth = 0 self.lists = {} self.namespaces = {} self._references = {} self._serialized = {} self._subjects = {} self._topLevels = {} def buildPredicateHash(self, subject): """Build a hash key by predicate to a list of objects for the given subject""" properties = {} for s,p,o in self.store.triples((subject, None, None)): oList = properties.get(p, []) oList.append(o) properties[p] = oList return properties def sortProperties(self, properties): """Take a hash from predicate uris to lists of values. Sort the lists of values. Return a sorted list of properties.""" # Sort object lists for prop, objects in properties.items(): objects.sort() # Make sorted list of properties propList = [] seen = {} for prop in self.predicateOrder: if (prop in properties) and (prop not in seen): propList.append(prop) seen[prop] = True props = properties.keys() props.sort() for prop in props: if prop not in seen: propList.append(prop) seen[prop] = True return propList def subjectDone(self, subject): """Mark a subject as done.""" self._serialized[subject] = True def indent(self, modifier=0): """Returns indent string multiplied by the depth""" return (self.depth+modifier)*self.indentString def write(self, text): """Write text in given encoding.""" self.stream.write(text.encode(self.encoding, 'replace')) rdflib-2.4.2/rdflib/syntax/serializers/XMLWriter.py0000644000175000017500000000423211153616030021253 0ustar nachonachoimport codecs from xml.sax.saxutils import quoteattr, escape class XMLWriter(object): def __init__(self, stream, namespace_manager, encoding=None, decl=1): encoding = encoding or 'utf-8' encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) self.stream = stream = stream_writer(stream) if decl: stream.write('' % 
encoding) self.element_stack = [] self.nm = namespace_manager self.closed = True def __get_indent(self): return " " * len(self.element_stack) indent = property(__get_indent) def __close_start_tag(self): if not self.closed: # TODO: self.closed = True self.stream.write(">") def push(self, uri): nm = self.nm self.__close_start_tag() write = self.stream.write write("\n") write(self.indent) write("<%s" % nm.qname(uri)) self.element_stack.append(uri) self.closed = False self.parent = False def pop(self, uri=None): top = self.element_stack.pop() if uri: assert uri==top write = self.stream.write if not self.closed: self.closed = True write("/>") else: if self.parent: write("\n") write(self.indent) write("" % self.nm.qname(uri)) self.parent = True def namespaces(self, namespaces): write = self.stream.write write("\n") for prefix, namespace in namespaces: if prefix: write(" xmlns:%s='%s'\n" % (prefix, namespace)) else: write(" xmlns='%s'\n" % namespace) def attribute(self, uri, value): write = self.stream.write write(" %s=%s" % (self.nm.qname(uri), quoteattr(value))) def text(self, text): self.__close_start_tag() if "<" in text and ">" in text and not "]]>" in text: self.stream.write("") else: self.stream.write(escape(text)) rdflib-2.4.2/rdflib/syntax/serializers/PrettyXMLSerializer.py0000644000175000017500000001247211153616030023325 0ustar nachonachofrom rdflib import RDF from rdflib import URIRef, Literal, BNode from rdflib.util import first, uniq, more_than from rdflib.Collection import Collection from rdflib.syntax.serializers import Serializer from rdflib.syntax.serializers.XMLWriter import XMLWriter XMLLANG = "http://www.w3.org/XML/1998/namespacelang" # TODO: def fix(val): "strip off _: from nodeIDs... 
as they are not valid NCNames" if val.startswith("_:"): return val[2:] else: return val class PrettyXMLSerializer(Serializer): def __init__(self, store, max_depth=3): super(PrettyXMLSerializer, self).__init__(store) self.forceRDFAbout=set() def serialize(self, stream, base=None, encoding=None, **args): self.__serialized = {} store = self.store self.base = base self.max_depth = args.get("max_depth", 3) assert self.max_depth>0, "max_depth must be greater than 0" self.nm = nm = store.namespace_manager self.writer = writer = XMLWriter(stream, nm, encoding) namespaces = {} possible = uniq(store.predicates()) + uniq(store.objects(None, RDF.type)) for predicate in possible: prefix, namespace, local = nm.compute_qname(predicate) namespaces[prefix] = namespace namespaces["rdf"] = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" writer.push(RDF.RDF) writer.namespaces(namespaces.iteritems()) # Write out subjects that can not be inline for subject in store.subjects(): if (None, None, subject) in store: if (subject, None, subject) in store: self.subject(subject, 1) else: self.subject(subject, 1) # write out anything that has not yet been reached for subject in store.subjects(): self.subject(subject, 1) writer.pop(RDF.RDF) # Set to None so that the memory can get garbage collected. 
self.__serialized = None def subject(self, subject, depth=1): store = self.store writer = self.writer if not subject in self.__serialized: self.__serialized[subject] = 1 type = first(store.objects(subject, RDF.type)) try: self.nm.qname(type) except: type = None element = type or RDF.Description writer.push(element) if isinstance(subject, BNode): def subj_as_obj_more_than(ceil): return more_than(store.triples((None, None, subject)), ceil) if (depth == 1 and subj_as_obj_more_than(0) ) or subj_as_obj_more_than(1): writer.attribute(RDF.nodeID, fix(subject)) else: writer.attribute(RDF.about, self.relativize(subject)) if (subject, None, None) in store: for predicate, object in store.predicate_objects(subject): if not (predicate==RDF.type and object==type): self.predicate(predicate, object, depth+1) writer.pop(element) elif subject in self.forceRDFAbout: writer.push(RDF.Description) writer.attribute(RDF.about, self.relativize(subject)) writer.pop(RDF.Description) self.forceRDFAbout.remove(subject) def predicate(self, predicate, object, depth=1): writer = self.writer store = self.store writer.push(predicate) if isinstance(object, Literal): attributes = "" if object.language: writer.attribute(XMLLANG, object.language) if object.datatype: writer.attribute(RDF.datatype, object.datatype) writer.text(object) elif object in self.__serialized or not (object, None, None) in store: if isinstance(object, BNode): if more_than(store.triples((None, None, object)), 0): writer.attribute(RDF.nodeID, fix(object)) else: writer.attribute(RDF.resource, self.relativize(object)) else: items = [] for item in store.items(object): # add a strict option to items? 
if isinstance(item, Literal): items = None # can not serialize list with literal values in them with rdf/xml else: items.append(item) if first(store.objects(object, RDF.first)): # may not have type RDF.List collection = object self.__serialized[object] = 1 # TODO: warn that any assertions on object other than # RDF.first and RDF.rest are ignored... including RDF.List writer.attribute(RDF.parseType, "Collection") col=Collection(store,object) for item in col: self.forceRDFAbout.add(item) self.subject(item) self.__serialized[item] = 1 else: if depth<=self.max_depth: self.subject(object, depth+1) elif isinstance(object, BNode): writer.attribute(RDF.nodeID, fix(object)) else: writer.attribute(RDF.resource, self.relativize(object)) writer.pop(predicate) rdflib-2.4.2/rdflib/syntax/serializers/N3Serializer.py0000644000175000017500000000636111153616030021735 0ustar nachonacho# rdflib/syntax/serializers/N3Serializer.py from rdflib.syntax.serializers.TurtleSerializer import TurtleSerializer, SUBJECT, VERB, OBJECT from rdflib.Graph import Graph class N3Serializer(TurtleSerializer): short_name = "n3" def __init__(self, store, parent=None): super(N3Serializer, self).__init__(store) self.parent = parent def reset(self): super(N3Serializer, self).reset() self._stores = {} def getQName(self, uri): qname = None if self.parent is not None: qname = self.parent.getQName(uri) if qname is None: qname = super(N3Serializer, self).getQName(uri) return qname def indent(self, modifier=0): indent = super(N3Serializer, self).indent(modifier) if self.parent is not None: indent += self.parent.indent(modifier) return indent def p_clause(self, node, ignore=SUBJECT): if isinstance(node, Graph): self.subjectDone(node) self.write(' {') self.depth += 1 serializer = N3Serializer(node, parent=self) serializer.serialize(self.stream) self.depth -= 1 self.write('\n'+self.indent()+' }') return True else: return False def s_clause(self, subject): if isinstance(subject, Graph): self.write('\n'+self.indent()) 
self.p_clause(subject, SUBJECT) self.predicateList(subject) self.write('. ') return True else: return False def preprocessTriple(self, triple): super(N3Serializer, self).preprocessTriple(triple) if isinstance(triple[0], Graph): for t in triple[0]: self.preprocessTriple(t) if isinstance(triple[2], Graph): for t in triple[2]: self.preprocessTriple(t) def statement(self, subject): self.subjectDone(subject) properties = self.buildPredicateHash(subject) if len(properties) == 0: return if not self.s_clause(subject): super(N3Serializer, self).statement(subject) def path(self, node, position): if not self.p_clause(node, position): super(N3Serializer, self).path(node, position) def startDocument(self): ns_list= list(self.namespaces.items()) ns_list.sort() for prefix, uri in ns_list: self.write('\n'+self.indent()+'@prefix %s: <%s>.'%(prefix, uri)) if len(ns_list) > 0: self.write('\n') #if not isinstance(self.store, N3Store): # return #all_list = [self.label(var) for var in self.store.get_universals(recurse=False)] #all_list.sort() #some_list = [self.label(var) for var in self.store.get_existentials(recurse=False)] #some_list.sort() #for var in all_list: # self.write('\n'+self.indent()+'@forAll %s. '%var) #for var in some_list: # self.write('\n'+self.indent()+'@forSome %s. '%var) #if (len(all_list) + len(some_list)) > 0: # self.write('\n') rdflib-2.4.2/rdflib/syntax/serializers/NTSerializer.py0000644000175000017500000000235211153616030021772 0ustar nachonacho#$Id: NTSerializer.py,v 1.6 2003/10/29 15:25:24 kendall Exp $ from rdflib.syntax.serializers import Serializer class NTSerializer(Serializer): def __init__(self, store): """ I serialize RDF graphs in NTriples format. 
""" super(NTSerializer, self).__init__(store) def serialize(self, stream, base=None, encoding=None, **args): if base is not None: print "TODO: NTSerializer does not support base" encoding = self.encoding write = lambda triple: stream.write((triple[0].n3() + u" " + \ triple[1].n3() + u" " + _xmlcharref_encode(triple[2].n3()) + u".\n").encode(encoding, "replace")) map(write, self.store) # from http://code.activestate.com/recipes/303668/ def _xmlcharref_encode(unicode_data, encoding="ascii"): """Emulate Python 2.3's 'xmlcharrefreplace' encoding error handler.""" chars = [] # Step through the unicode_data string one character at a time in # order to catch unencodable characters: for char in unicode_data: try: chars.append(char.encode(encoding, 'strict')) except UnicodeError: chars.append('\u%04X' % ord(char)) return ''.join(chars) rdflib-2.4.2/rdflib/syntax/serializers/QNameProvider.py0000644000175000017500000000273211153616030022135 0ustar nachonacho from rdflib.syntax.xml_names import split_uri XMLLANG = u"http://www.w3.org/XML/1998/namespace#lang" class QNameProvider(object): def __init__(self): self.__cache = {} self.__namespace = {} # mapping for prefix to namespace self.__prefix = {} self.set_prefix("xml", u"http://www.w3.org/XML/1998/namespace") # TODO: explain -- the following is needed for XMLLANG as defined above to work self.__prefix[u"http://www.w3.org/XML/1998/namespace#"] = "xml" def get(self, uri): qname = self.__cache.get(uri, None) if qname is None: self.compute(uri) return self.get(uri) else: return qname def compute(self, uri): if not uri in self.__cache: namespace, name = split_uri(uri) prefix = self.__prefix.get(namespace, None) if prefix is None: prefix = "_%s" % len(self.__namespace) self.set_prefix(prefix, namespace) if prefix=="": self.__cache[uri] = name else: self.__cache[uri] = ":".join((prefix, name)) def set_prefix(self, prefix, namespace): if prefix in self.__namespace: raise "NYI: prefix already set" self.__namespace[prefix] = namespace 
self.__prefix[namespace] = prefix def namespaces(self): for prefix, namespace in self.__namespace.iteritems(): yield prefix, namespace rdflib-2.4.2/rdflib/syntax/serializers/AbstractSerializer.py0000644000175000017500000000071111153616030023211 0ustar nachonachofrom rdflib import URIRef class AbstractSerializer(object): def __init__(self, store): self.store = store self.encoding = "UTF-8" self.base = None def serialize(self, stream, base=None, encoding=None, **args): """Abstract method""" def relativize(self, uri): base = self.base if base is not None and uri.startswith(base): uri = URIRef(uri.replace(base, "", 1)) return uri rdflib-2.4.2/rdflib/syntax/serializers/TurtleSerializer.py0000644000175000017500000001401311153616030022725 0ustar nachonachoimport urlparse from xml.sax.saxutils import escape, quoteattr from rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.URIRef import URIRef from rdflib.syntax.xml_names import split_uri from rdflib.syntax.serializers.RecursiveSerializer import RecursiveSerializer from rdflib.exceptions import Error from rdflib import RDF, RDFS SUBJECT = 0 VERB = 1 OBJECT = 2 class TurtleSerializer(RecursiveSerializer): short_name="turtle" indentString = " " def __init__(self, store): super(TurtleSerializer, self).__init__(store) self.reset() self.stream = None def reset(self): super(TurtleSerializer, self).reset() self._shortNames = {} self._started = False def getQName(self, uri): if isinstance(uri, URIRef): if self.base and uri.startswith(self.base): # this feels too simple, but I dont see why I wont work :) -Gunnar return "<%s>"%uri[len(self.base):] try: parts = self.store.compute_qname(uri) except Exception, e: parts = None if parts: prefix, namespace, local = parts if local.find(".")!=-1: # Local parts with . 
will mess up serialization return None self.addNamespace(prefix, namespace) return u"%s:%s" % (prefix, local) return None def preprocessTriple(self, triple): super(TurtleSerializer, self).preprocessTriple(triple) for node in triple: self.getQName(node) p = triple[1] if isinstance(p, BNode): self._references[p] = self.refCount(p) +1 def label(self, node): qname = self.getQName(node) if qname is None: return node.n3() return qname def startDocument(self): self._started = True ns_list= list(self.store.namespaces()) ns_list.sort() if len(ns_list) == 0: return for prefix, uri in ns_list: self.write('\n'+self.indent()+'@prefix %s: <%s>.'%(prefix, uri)) self.write('\n') def endDocument(self): pass def isValidList(self,l): """Checks if l is a valid RDF list, i.e. no nodes have other properties.""" try: if not self.store.value(l, RDF.first): return False except: return False while l: if l!=RDF.nil and len(list(self.store.predicate_objects(l)))!=2: return False l = self.store.value(l, RDF.rest) return True def doList(self,l): while l: item = self.store.value(l, RDF.first) if item: self.path(item, SUBJECT) self.subjectDone(l) l = self.store.value(l, RDF.rest) def p_squared(self, node, position): if (not isinstance(node, BNode) or node in self._serialized or self.refCount(node) > 1 or position == SUBJECT): return False if self.isValidList(node): # this is a list self.write(' (') self.depth+=2 self.doList(node) self.depth-=2 self.write(' )') return True self.subjectDone(node) self.write(' [') self.depth += 2 self.predicateList(node) self.depth -= 2 self.write(']') return True def p_default(self, node, ignore): self.write(" "+self.label(node)) return True def path(self, node, position): if not (self.p_squared(node, position) or self.p_default(node, position)): raise Error("Cannot serialize node '%s'"%(node, )) def verb(self, node): if node == RDF.type: self.write(' a') else: self.path(node, VERB) def objectList(self, objects): if len(objects) == 0: return self.path(objects[0], 
OBJECT) for obj in objects[1:]: self.write(',\n'+self.indent(2)) self.path(obj, OBJECT) def predicateList(self, subject): properties = self.buildPredicateHash(subject) propList = self.sortProperties(properties) if len(propList) == 0: return self.verb(propList[0]) self.objectList(properties[propList[0]]) for predicate in propList[1:]: self.write(';\n'+self.indent(1)) self.verb(predicate) self.objectList(properties[predicate]) def s_squared(self, subject): if (self.refCount(subject) > 0) or not isinstance(subject, BNode): return False self.write('\n'+self.indent()+" [") self.depth+=1 self.predicateList(subject) self.depth-=1 self.write('].') return True def s_default(self, subject): self.write('\n'+self.indent()) self.path(subject, SUBJECT) self.predicateList(subject) self.write('. ') return True def statement(self, subject): self.subjectDone(subject) if not self.s_squared(subject): self.s_default(subject) def serialize(self, stream, base=None, encoding=None, **args): self.reset() self.stream = stream self.base=base # In newer rdflibs these are always in the namespace manager #self.store.prefix_mapping('rdf', RDFNS) #self.store.prefix_mapping('rdfs', RDFSNS) self.preprocess() subjects_list = self.orderSubjects() self.startDocument() firstTime = True for subject in subjects_list: if not self.isDone(subject): if firstTime: firstTime = False else: self.write('\n') self.statement(subject) self.endDocument() rdflib-2.4.2/rdflib/syntax/serializers/__init__.py0000644000175000017500000000070111153616030021152 0ustar nachonachofrom rdflib import URIRef class Serializer(object): def __init__(self, store): self.store = store self.encoding = "UTF-8" self.base = None def serialize(self, stream, base=None, encoding=None, **args): """Abstract method""" def relativize(self, uri): base = self.base if base is not None and uri.startswith(base): uri = URIRef(uri.replace(base, "", 1)) return uri 
rdflib-2.4.2/rdflib/syntax/serializers/XMLSerializer.py0000644000175000017500000000776511203344036022126 0ustar nachonachofrom __future__ import generators from rdflib.syntax.serializers import Serializer from rdflib.URIRef import URIRef from rdflib.Literal import Literal from rdflib.BNode import BNode from rdflib.util import uniq from rdflib.exceptions import Error from rdflib.syntax.xml_names import split_uri from xml.sax.saxutils import quoteattr, escape class XMLSerializer(Serializer): def __init__(self, store): super(XMLSerializer, self).__init__(store) def __bindings(self): store = self.store nm = store.namespace_manager bindings = {} for predicate in uniq(store.predicates()): prefix, namespace, name = nm.compute_qname(predicate) bindings[prefix] = URIRef(namespace) RDFNS = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#") if "rdf" in bindings: assert bindings["rdf"]==RDFNS else: bindings["rdf"] = RDFNS for prefix, namespace in bindings.iteritems(): yield prefix, namespace def serialize(self, stream, base=None, encoding=None, **args): self.base = base self.__stream = stream self.__serialized = {} encoding = self.encoding self.write = write = lambda uni: stream.write(uni.encode(encoding, 'replace')) # startDocument write('\n' % self.encoding) # startRDF write('\n') # write out triples by subject for subject in self.store.subjects(): self.subject(subject, 1) # endRDF write( "\n" ) # Set to None so that the memory can get garbage collected. 
#self.__serialized = None del self.__serialized def subject(self, subject, depth=1): if not subject in self.__serialized: self.__serialized[subject] = 1 if isinstance(subject, (BNode,URIRef)): write = self.write indent = " " * depth element_name = "rdf:Description" if isinstance(subject, BNode): write( '%s<%s rdf:nodeID="%s"' % (indent, element_name, subject)) else: uri = quoteattr(self.relativize(subject)) write( "%s<%s rdf:about=%s" % (indent, element_name, uri)) if (subject, None, None) in self.store: write( ">\n" ) for predicate, object in self.store.predicate_objects(subject): self.predicate(predicate, object, depth+1) write( "%s\n" % (indent, element_name)) else: write( "/>\n" ) def predicate(self, predicate, object, depth=1): write = self.write indent = " " * depth qname = self.store.namespace_manager.qname(predicate) if isinstance(object, Literal): attributes = "" if object.language: attributes += ' xml:lang="%s"'%object.language if object.datatype: attributes += ' rdf:datatype="%s"'%object.datatype write("%s<%s%s>%s\n" % (indent, qname, attributes, escape(object), qname) ) else: if isinstance(object, BNode): write('%s<%s rdf:nodeID="%s"/>\n' % (indent, qname, object)) else: write("%s<%s rdf:resource=%s/>\n" % (indent, qname, quoteattr(self.relativize(object)))) rdflib-2.4.2/rdflib/util.py0000644000175000017500000001567511153616035014533 0ustar nachonachofrom rdflib.URIRef import URIRef from rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.Variable import Variable from rdflib.Graph import Graph, QuotedGraph from rdflib.Statement import Statement from rdflib.exceptions import SubjectTypeError, PredicateTypeError, ObjectTypeError, ContextTypeError from rdflib.compat import rsplit from cPickle import loads def list2set(seq): seen = set() return [ x for x in seq if x not in seen and not seen.add(x)] def first(seq): for result in seq: return result return None def uniq(sequence, strip=0): """removes duplicate strings from the sequence.""" 
set = {} if strip: map(lambda val, default: set.__setitem__(val.strip(), default), sequence, []) else: map(set.__setitem__, sequence, []) return set.keys() def more_than(sequence, number): "Returns 1 if sequence has more items than number and 0 if not." i = 0 for item in sequence: i += 1 if i > number: return 1 return 0 def term(str, default=None): """See also from_n3""" if not str: return default elif str.startswith("<") and str.endswith(">"): return URIRef(str[1:-1]) elif str.startswith('"') and str.endswith('"'): return Literal(str[1:-1]) elif str.startswith("_"): return BNode(str) else: msg = "Unknown Term Syntax: '%s'" % str raise Exception(msg) from time import mktime, time, gmtime, localtime, timezone, altzone, daylight def date_time(t=None, local_time_zone=False): """http://www.w3.org/TR/NOTE-datetime ex: 1997-07-16T19:20:30Z >>> date_time(1126482850) '2005-09-11T23:54:10Z' @@ this will change depending on where it is run #>>> date_time(1126482850, local_time_zone=True) #'2005-09-11T19:54:10-04:00' >>> date_time(1) '1970-01-01T00:00:01Z' >>> date_time(0) '1970-01-01T00:00:00Z' """ if t is None: t = time() if local_time_zone: time_tuple = localtime(t) if time_tuple[8]: tz_mins = altzone // 60 else: tz_mins = timezone // 60 tzd = "-%02d:%02d" % (tz_mins // 60, tz_mins % 60) else: time_tuple = gmtime(t) tzd = "Z" year, month, day, hh, mm, ss, wd, y, z = time_tuple s = "%0004d-%02d-%02dT%02d:%02d:%02d%s" % ( year, month, day, hh, mm, ss, tzd) return s def parse_date_time(val): """always returns seconds in UTC # tests are written like this to make any errors easier to understand >>> parse_date_time('2005-09-11T23:54:10Z') - 1126482850.0 0.0 >>> parse_date_time('2005-09-11T16:54:10-07:00') - 1126482850.0 0.0 >>> parse_date_time('1970-01-01T00:00:01Z') - 1.0 0.0 >>> parse_date_time('1970-01-01T00:00:00Z') - 0.0 0.0 >>> parse_date_time("2005-09-05T10:42:00") - 1125916920.0 0.0 """ if "T" not in val: val += "T00:00:00Z" ymd, time = val.split("T") hms, tz_str = 
time[0:8], time[8:] if not tz_str or tz_str=="Z": time = time[:-1] tz_offset = 0 else: signed_hrs = int(tz_str[:3]) mins = int(tz_str[4:6]) secs = (cmp(signed_hrs, 0) * mins + signed_hrs * 60) * 60 tz_offset = -secs year, month, day = ymd.split("-") hour, minute, second = hms.split(":") t = mktime((int(year), int(month), int(day), int(hour), int(minute), int(second), 0, 0, 0)) t = t - timezone + tz_offset return t def from_n3(s, default=None, backend=None): """ Creates the Identifier corresponding to the given n3 string. WARNING: untested, may contain bugs. TODO: add test cases.""" if not s: return default if s.startswith('<'): return URIRef(s[1:-1]) elif s.startswith('"'): # TODO: would a regex be faster? value, rest = rsplit(s, '"', 1) value = value[1:] # strip leading quote if rest.startswith("@"): if "^^" in rest: language, rest = rsplit(rest, '^^', 1) language = language[1:] # strip leading at sign else: language = rest[1:] # strip leading at sign rest = '' else: language = None if rest.startswith("^^"): datatype = rest[3:-1] else: datatype = None value = value.replace('\\"', '"').replace('\\\\', '\\').decode("unicode-escape") return Literal(value, language, datatype) elif s.startswith('{'): identifier = from_n3(s[1:-1]) return QuotedGraph(backend, identifier) elif s.startswith('['): identifier = from_n3(s[1:-1]) return Graph(backend, identifier) else: if s.startswith("_:"): return BNode(s[2:]) else: return BNode(s) def check_context(c): if not (isinstance(c, URIRef) or \ isinstance(c, BNode)): raise ContextTypeError("%s:%s" % (c, type(c))) def check_subject(s): """ Test that s is a valid subject identifier.""" if not (isinstance(s, URIRef) or isinstance(s, BNode)): raise SubjectTypeError(s) def check_predicate(p): """ Test that p is a valid predicate identifier.""" if not isinstance(p, URIRef): raise PredicateTypeError(p) def check_object(o): """ Test that o is a valid object identifier.""" if not (isinstance(o, URIRef) or \ isinstance(o, Literal) or \ 
isinstance(o, BNode)): raise ObjectTypeError(o) def check_statement((s, p, o)): if not (isinstance(s, URIRef) or isinstance(s, BNode)): raise SubjectTypeError(s) if not isinstance(p, URIRef): raise PredicateTypeError(p) if not (isinstance(o, URIRef) or \ isinstance(o, Literal) or \ isinstance(o, BNode)): raise ObjectTypeError(o) def check_pattern((s, p, o)): if s and not (isinstance(s, URIRef) or isinstance(s, BNode)): raise SubjectTypeError(s) if p and not isinstance(p, URIRef): raise PredicateTypeError(p) if o and not (isinstance(o, URIRef) or \ isinstance(o, Literal) or \ isinstance(o, BNode)): raise ObjectTypeError(o) def graph_to_dot(graph, dot): """ Turns graph into dot (graphviz graph drawing format) using pydot. """ import pydot nodes = {} for s, o in graph.subject_objects(): for i in s,o: if i not in nodes.keys(): nodes[i] = i for s, p, o in graph.triples((None,None,None)): dot.add_edge(pydot.Edge(nodes[s], nodes[o], label=p)) if __name__ == "__main__": # try to make the tests work outside of the time zone they were written in #import os, time #os.environ['TZ'] = 'US/Pacific' #try: # time.tzset() #except AttributeError, e: # print e #pass # tzset missing! see # http://mail.python.org/pipermail/python-dev/2003-April/034480.html import doctest doctest.testmod() rdflib-2.4.2/rdflib/BNode.py0000644000175000017500000000606211153616035014533 0ustar nachonacho# TODO: where can we move _unique_id and _serial_number_generator? 
from string import ascii_letters
from random import choice

# hashlib exists from Python 2.5; fall back to the legacy md5 module otherwise
try:
    from hashlib import md5
except ImportError:
    from md5 import md5

def _unique_id():
    """Create a (hopefully) unique prefix"""
    # Eight random ASCII letters (52**8 possibilities); not cryptographic,
    # collisions are merely unlikely, not impossible.
    id = ""
    for i in xrange(0,8):
        id += choice(ascii_letters)
    return id

def _serial_number_generator():
    """Yield 0, 1, 2, ... without end; a single shared instance numbers
    every default-constructed BNode in this process."""
    i = 0
    while 1:
        yield i
        i = i + 1

from rdflib.Identifier import Identifier
from rdflib.syntax.xml_names import is_ncname

import threading

# Serializes _sn_gen.next() in BNode.__new__ so two threads never draw the
# same serial number.
bNodeLock = threading.RLock()

class BNode(Identifier):
    """
    Blank Node: http://www.w3.org/TR/rdf-concepts/#section-blank-nodes

    "In non-persistent O-O software construction, support for object
    identity is almost accidental: in the simplest implementation, each
    object resides at a certain address, and a reference to the object
    uses that address, which serves as immutable object identity.

    ...

    Maintaining object identity in shared databases raises problems:
    every client that needs to create objects must obtain a unique
    identity for them; " -- Bertand Meyer
    """
    __slots__ = ()

    def __new__(cls, value=None, # only store implementations should pass in a value
                _sn_gen=_serial_number_generator(), _prefix=_unique_id()):
        # NOTE: _sn_gen and _prefix are default arguments, evaluated exactly
        # once when this def is executed -- deliberately, so all BNodes made
        # in this process share one random prefix and one serial stream.
        if value==None:
            # so that BNode values do not
            # collide with ones created with a different instance of this module
            # at some other time.
            bNodeLock.acquire()
            node_id = _sn_gen.next()
            bNodeLock.release()
            value = "%s%s" % (_prefix, node_id)
        else:
            # TODO: check that value falls within acceptable bnode value range
            # for RDF/XML needs to be something that can be serialzed as a nodeID
            # for N3 ??
            # Unless we require these constraints be enforced elsewhere?
pass #assert is_ncname(unicode(value)), "BNode identifiers must be valid NCNames" return Identifier.__new__(cls, value) def n3(self): return "_:%s" % self def __getnewargs__(self): return (unicode(self), ) def __reduce__(self): return (BNode, (unicode(self),)) def __ne__(self, other): return not self.__eq__(other) def __eq__(self, other): """ >>> from rdflib.URIRef import URIRef >>> from rdflib.BNode import BNode >>> BNode("foo")==None False >>> BNode("foo")==URIRef("foo") False >>> URIRef("foo")==BNode("foo") False >>> BNode("foo")!=URIRef("foo") True >>> URIRef("foo")!=BNode("foo") True """ if isinstance(other, BNode): return unicode(self)==unicode(other) else: return False def __str__(self): return self.encode("unicode-escape") def __repr__(self): return """rdflib.BNode('%s')""" % str(self) def md5_term_hash(self): d = md5(str(self)) d.update("B") return d.hexdigest() rdflib-2.4.2/rdflib/Literal.py0000644000175000017500000003170411153616035015141 0ustar nachonachofrom rdflib.Identifier import Identifier from rdflib.URIRef import URIRef from rdflib.Namespace import Namespace from rdflib.exceptions import Error from datetime import date,time,datetime from time import strptime import base64 try: from hashlib import md5 except ImportError: from md5 import md5 import logging _logger = logging.getLogger(__name__) class Literal(Identifier): """ RDF Literal: http://www.w3.org/TR/rdf-concepts/#section-Graph-Literal >>> Literal(1).toPython() 1L >>> cmp(Literal("adsf"), 1) 1 >>> lit2006 = Literal('2006-01-01',datatype=_XSD_NS.date) >>> lit2006.toPython() datetime.date(2006, 1, 1) >>> lit2006 < Literal('2007-01-01',datatype=_XSD_NS.date) True >>> Literal(datetime.utcnow()).datatype rdflib.URIRef('http://www.w3.org/2001/XMLSchema#dateTime') >>> oneInt = Literal(1) >>> twoInt = Literal(2) >>> twoInt < oneInt False >>> Literal('1') < Literal(1) False >>> Literal('1') < Literal('1') False >>> Literal(1) < Literal('1') True >>> Literal(1) < Literal(2.0) True >>> Literal(1) < 
URIRef('foo') True >>> Literal(1) < 2.0 True >>> Literal(1) < object True >>> lit2006 < "2007" True >>> "2005" < lit2006 True """ __slots__ = ("language", "datatype", "_cmp_value") def __new__(cls, value, lang=None, datatype=None): if datatype: lang = None else: value,datatype = _castPythonToLiteral(value) if datatype: lang = None if datatype: datatype = URIRef(datatype) try: inst = unicode.__new__(cls,value) except UnicodeDecodeError: inst = unicode.__new__(cls,value,'utf-8') inst.language = lang inst.datatype = datatype inst._cmp_value = inst._toCompareValue() return inst def __reduce__(self): return (Literal, (unicode(self), self.language, self.datatype),) def __getstate__(self): return (None, dict(language=self.language, datatype=self.datatype)) def __setstate__(self, arg): _, d = arg self.language = d["language"] self.datatype = d["datatype"] def __add__(self, val): """ >>> Literal(1) + 1 2L >>> Literal("1") + "1" rdflib.Literal(u'11') """ py = self.toPython() if isinstance(py, Literal): s = super(Literal, self).__add__(val) return Literal(s, self.language, self.datatype) else: return py + val def __lt__(self, other): """ >>> Literal("YXNkZg==", datatype=_XSD_NS[u'base64Binary']) < "foo" True >>> u"\xfe" < Literal(u"foo") False >>> Literal(base64.encodestring(u"\xfe".encode("utf-8")), datatype=URIRef("http://www.w3.org/2001/XMLSchema#base64Binary")) < u"foo" False """ if other is None: return False # Nothing is less than None try: return self._cmp_value < other except TypeError, te: return unicode(self._cmp_value) < other except UnicodeDecodeError, ue: if isinstance(self._cmp_value, str): return self._cmp_value < other.encode("utf-8") else: raise ue def __le__(self, other): if other is None: return False if self==other: return True else: return self < other def __gt__(self, other): if other is None: return True # Everything is greater than None try: return self._cmp_value > other except TypeError, te: return unicode(self._cmp_value) > other except 
UnicodeDecodeError, ue: if isinstance(self._cmp_value, str): return self._cmp_value > other.encode("utf-8") else: raise ue def __ge__(self, other): if other is None: return False if self==other: return True else: return self > other def __ne__(self, other): """ Overriden to ensure property result for comparisons with None via !=. Routes all other such != and <> comparisons to __eq__ >>> Literal('') != None True >>> Literal('2') <> Literal('2') False """ return not self.__eq__(other) def __hash__(self): """ >>> a = {Literal('1',datatype=_XSD_NS.integer):'one'} >>> Literal('1',datatype=_XSD_NS.double) in a False [[ Called for the key object for dictionary operations, and by the built-in function hash(). Should return a 32-bit integer usable as a hash value for dictionary operations. The only required property is that objects which compare equal have the same hash value; it is advised to somehow mix together (e.g., using exclusive or) the hash values for the components of the object that also play a part in comparison of objects. ]] -- 3.4.1 Basic customization (Python) [[ Two literals are equal if and only if all of the following hold: * The strings of the two lexical forms compare equal, character by character. * Either both or neither have language tags. * The language tags, if any, compare equal. * Either both or neither have datatype URIs. * The two datatype URIs, if any, compare equal, character by character. 
]] -- 6.5.1 Literal Equality (RDF: Concepts and Abstract Syntax) """ return hash(str(self)) ^ hash(self.language) ^ hash(self.datatype) def __eq__(self, other): """ >>> f = URIRef("foo") >>> f is None or f == '' False >>> Literal("1", datatype=URIRef("foo")) == Literal("1", datatype=URIRef("foo")) True >>> Literal("1", datatype=URIRef("foo")) == Literal("2", datatype=URIRef("foo")) False >>> Literal("1", datatype=URIRef("foo")) == "asdf" False >>> Literal('2007-01-01', datatype=_XSD_NS.date) == Literal('2007-01-01', datatype=_XSD_NS.date) True >>> Literal('2007-01-01', datatype=_XSD_NS.date) == date(2007, 1, 1) True >>> oneInt = Literal(1) >>> oneNoDtype = Literal('1') >>> oneInt == oneNoDtype False >>> Literal("1",_XSD_NS[u'string']) == Literal("1",_XSD_NS[u'string']) True >>> Literal("one",lang="en") == Literal("one",lang="en") True >>> Literal("hast",lang='en') == Literal("hast",lang='de') False >>> oneInt == Literal(1) True >>> oneFloat = Literal(1.0) >>> oneInt == oneFloat True >>> oneInt == 1 True """ if other is None: return False if isinstance(other, Literal): return self._cmp_value == other._cmp_value else: return self._cmp_value == other def n3(self): language = self.language datatype = self.datatype # unfortunately this doesn't work: a newline gets encoded as \\n, which is ok in sourcecode, but we want \n #encoded = self.encode('unicode-escape').replace('\\', '\\\\').replace('"','\\"') #encoded = self.replace.replace('\\', '\\\\').replace('"','\\"') # TODO: We could also chose quotes based on the quotes appearing in the string, i.e. '"' and "'" ... # which is nicer? #if self.find("\"")!=-1 or self.find("'")!=-1 or self.find("\n")!=-1: if self.find("\n")!=-1: # Triple quote this string. encoded=self.replace('\\', '\\\\') if self.find('"""')!=-1: # is this ok? 
encoded=encoded.replace('"""','\\"""') if encoded.endswith('"'): encoded=encoded[:-1]+"\\\"" encoded='"""%s"""'%encoded else: encoded='"%s"'%self.replace('\n','\\n').replace('\\', '\\\\').replace('"','\\"') if language: if datatype: return '%s@%s^^<%s>' % (encoded, language, datatype) else: return '%s@%s' % (encoded, language) else: if datatype: return '%s^^<%s>' % (encoded, datatype) else: return '%s' % encoded def __str__(self): return self.encode("unicode-escape") def __repr__(self): args = [super(Literal, self).__repr__()] if self.language is not None: args.append("lang=%s" % repr(self.language)) if self.datatype is not None: args.append("datatype=%s" % repr(self.datatype)) return """rdflib.Literal(%s)""" % ", ".join(args) def toPython(self): """ Returns an appropriate python datatype derived from this RDF Literal """ convFunc = _toPythonMapping.get(self.datatype, None) if convFunc: rt = convFunc(self) else: rt = self return rt def _toCompareValue(self): try: rt = self.toPython() except Exception, e: _logger.warning("could not convert %s to a Python datatype" % repr(self)) rt = self if rt is self: if self.language is None and self.datatype is None: return unicode(rt) else: return (unicode(rt), rt.datatype, rt.language) return rt def md5_term_hash(self): d = md5(str(self)) d.update("L") return d.hexdigest() _XSD_NS = Namespace(u'http://www.w3.org/2001/XMLSchema#') #Casts a python datatype to a tuple of the lexical value and a datatype URI (or None) def _castPythonToLiteral(obj): for pType,(castFunc,dType) in _PythonToXSD: if isinstance(obj,pType): if castFunc: return castFunc(obj),dType elif dType: return obj,dType else: return obj,None return obj, None # TODO: is this right for the fall through case? # Mappings from Python types to XSD datatypes and back (burrowed from sparta) # datetime instances are also instances of date... so we need to order these. 
# Ordered list of (python type, (cast function, XSD datatype URI)) pairs.
# Order matters: datetime instances are also date instances, so datetime
# must be matched first (see comment above this table in the source).
_PythonToXSD = [
    (basestring, (None,None)),
    (float , (None,_XSD_NS[u'float'])),
    (int , (None,_XSD_NS[u'integer'])),
    (long , (None,_XSD_NS[u'long'])),
    (bool , (None,_XSD_NS[u'boolean'])),
    (datetime , (lambda i:i.isoformat(),_XSD_NS[u'dateTime'])),
    (date , (lambda i:i.isoformat(),_XSD_NS[u'date'])),
    (time , (lambda i:i.isoformat(),_XSD_NS[u'time'])),
]

def _strToTime(v) :
    # NOTE(review): returns a time.struct_time rather than a datetime.time,
    # unlike _strToDate/_strToDateTime which build date/datetime objects --
    # confirm callers expect this before changing it.
    return strptime(v,"%H:%M:%S")

def _strToDate(v) :
    """Parse an xsd:date lexical form ('YYYY-MM-DD') into a datetime.date."""
    tstr = strptime(v,"%Y-%m-%d")
    return date(tstr.tm_year,tstr.tm_mon,tstr.tm_mday)

def _strToDateTime(v) :
    """
    Attempt to cast to datetime, or just return the string (otherwise)
    """
    # Try the plain, 'Z'-suffixed and %Z-suffixed forms in turn.  The bare
    # excepts deliberately swallow parse failures so that an unparsable
    # lexical form is returned unchanged instead of raising.
    try:
        tstr = strptime(v,"%Y-%m-%dT%H:%M:%S")
    except:
        try:
            tstr = strptime(v,"%Y-%m-%dT%H:%M:%SZ")
        except:
            try:
                tstr = strptime(v,"%Y-%m-%dT%H:%M:%S%Z")
            except:
                return v
    return datetime(tstr.tm_year,tstr.tm_mon,tstr.tm_mday,tstr.tm_hour,tstr.tm_min,tstr.tm_sec)

# XSD datatype URI -> Python conversion callable.  None means "no
# conversion": the Literal is handed back as-is by Literal.toPython().
XSDToPython = {
    _XSD_NS[u'time'] : _strToTime,
    _XSD_NS[u'date'] : _strToDate,
    _XSD_NS[u'dateTime'] : _strToDateTime,
    _XSD_NS[u'string'] : None,
    _XSD_NS[u'normalizedString'] : None,
    _XSD_NS[u'token'] : None,
    _XSD_NS[u'language'] : None,
    _XSD_NS[u'boolean'] : lambda i:i.lower() in ['1','true'],
    _XSD_NS[u'decimal'] : float,
    _XSD_NS[u'integer'] : long,
    _XSD_NS[u'nonPositiveInteger'] : int,
    _XSD_NS[u'long'] : long,
    _XSD_NS[u'nonNegativeInteger'] : int,
    _XSD_NS[u'negativeInteger'] : int,
    _XSD_NS[u'int'] : long,
    _XSD_NS[u'unsignedLong'] : long,
    _XSD_NS[u'positiveInteger'] : int,
    _XSD_NS[u'short'] : int,
    _XSD_NS[u'unsignedInt'] : long,
    _XSD_NS[u'byte'] : int,
    _XSD_NS[u'unsignedShort'] : int,
    _XSD_NS[u'unsignedByte'] : int,
    _XSD_NS[u'float'] : float,
    _XSD_NS[u'double'] : float,
    _XSD_NS[u'base64Binary'] : base64.decodestring,
    _XSD_NS[u'anyURI'] : None,
}

# The live mapping consulted by Literal.toPython(); starts as a copy of
# XSDToPython and is extended/overridden via bind() below.
_toPythonMapping = {}
_toPythonMapping.update(XSDToPython)

def bind(datatype, conversion_function):
    """bind a datatype to a function for converting it into a Python instance."""
    if datatype in _toPythonMapping:
        _logger.warning("datatype '%s' was already bound. Rebinding."
% datatype) _toPythonMapping[datatype] = conversion_function def test(): import doctest doctest.testmod() if __name__ == '__main__': test() rdflib-2.4.2/rdflib/QueryResult.py0000644000175000017500000000056611153616035016053 0ustar nachonachoclass QueryResult(object): """ A common class for representing query result in a variety of formats, namely: xml : as an XML string using the XML result format of the query language python: as Python objects json : as JSON """ def __init__(self,pythonResult): self.rt = pythonResult def serialize(self,format='xml'): passrdflib-2.4.2/rdflib/Collection.py0000644000175000017500000001752211153616035015642 0ustar nachonachofrom rdflib import RDF, BNode, Literal from rdflib.Graph import Graph class Collection(object): """ See 3.3.5 Emulating container types: http://docs.python.org/ref/sequence-types.html#l2h-232 >>> from rdflib.BNode import BNode >>> from rdflib.Literal import Literal >>> from rdflib.Graph import Graph >>> listName = BNode() >>> g = Graph('IOMemory') >>> listItem1 = BNode() >>> listItem2 = BNode() >>> g.add((listName,RDF.first,Literal(1))) >>> g.add((listName,RDF.rest,listItem1)) >>> g.add((listItem1,RDF.first,Literal(2))) >>> g.add((listItem1,RDF.rest,listItem2)) >>> g.add((listItem2,RDF.rest,RDF.nil)) >>> g.add((listItem2,RDF.first,Literal(3))) >>> c=Collection(g,listName) >>> print list(c) [rdflib.Literal(u'1', datatype=rdflib.URIRef('http://www.w3.org/2001/XMLSchema#integer')), rdflib.Literal(u'2', datatype=rdflib.URIRef('http://www.w3.org/2001/XMLSchema#integer')), rdflib.Literal(u'3', datatype=rdflib.URIRef('http://www.w3.org/2001/XMLSchema#integer'))] >>> 1 in c True >>> len(c) 3 >>> c._get_container(1) == listItem1 True >>> c.index(Literal(2)) == 1 True """ def __init__(self, graph, uri, seq=[]): self.graph = graph self.uri = uri or BNode() for item in seq: self.append(item) def n3(self): """ >>> from rdflib.BNode import BNode >>> from rdflib.Literal import Literal >>> from rdflib.Graph import Graph >>> 
listName = BNode() >>> g = Graph('IOMemory') >>> listItem1 = BNode() >>> listItem2 = BNode() >>> g.add((listName,RDF.first,Literal(1))) >>> g.add((listName,RDF.rest,listItem1)) >>> g.add((listItem1,RDF.first,Literal(2))) >>> g.add((listItem1,RDF.rest,listItem2)) >>> g.add((listItem2,RDF.rest,RDF.nil)) >>> g.add((listItem2,RDF.first,Literal(3))) >>> c=Collection(g,listName) >>> print c.n3() ( "1"^^ "2"^^ "3"^^ ) """ return "( %s )"%(' '.join([i.n3() for i in self])) def _get_container(self, index): """Gets the first, rest holding node at index.""" assert isinstance(index, int) graph = self.graph container = self.uri i = 0 while i>> from rdflib import RDF, RDFS >>> from pprint import pformat >>> g=Graph() >>> a=BNode('foo') >>> b=BNode('bar') >>> c=BNode('baz') >>> g.add((a,RDF.first,RDF.type)) >>> g.add((a,RDF.rest,b)) >>> g.add((b,RDF.first,RDFS.label)) >>> g.add((b,RDF.rest,c)) >>> g.add((c,RDF.first,RDFS.comment)) >>> g.add((c,RDF.rest,RDF.nil)) >>> len(g) 6 >>> def listAncestry(node,graph): ... for i in graph.subjects(RDF.rest,node): ... 
yield i >>> [str(node.n3()) for node in g.transitiveClosure(listAncestry,RDF.nil)] ['_:baz', '_:bar', '_:foo'] >>> lst=Collection(g,a) >>> len(lst) 3 >>> b==lst._get_container(1) True >>> c==lst._get_container(2) True >>> del lst[1] >>> len(lst) 2 >>> len(g) 4 """ self[key] # to raise any potential key exceptions graph = self.graph current = self._get_container(key) assert current if len(self)==1 and key>0: pass elif key==len(self)-1: #the tail priorLink = self._get_container(key-1) self.graph.set((priorLink,RDF.rest,RDF.nil)) graph.remove((current, None, None)) else: next = self._get_container(key+1) prior = self._get_container(key-1) assert next and prior graph.remove((current, None, None)) graph.set((prior, RDF.rest, next)) def __iter__(self): """Iterator over items in Collections""" return self.graph.items(self.uri) def append(self, item): """ >>> from rdflib.Graph import Graph >>> listName = BNode() >>> g = Graph() >>> c=Collection(g,listName,[Literal(1),Literal(2)]) >>> links = [list(g.subjects(object=i,predicate=RDF.first))[0] for i in c] >>> len([i for i in links if (i,RDF.rest,RDF.nil) in g]) 1 """ container = self.uri graph = self.graph #iterate to the end of the linked list rest = graph.value(container, RDF.rest) while rest: if rest == RDF.nil: #the end, append to the end of the linked list node = BNode() graph.set((container, RDF.rest, node)) container=node break else: #move down one link if container != self.uri: rest = graph.value(rest, RDF.rest) if not rest == RDF.nil: container=rest graph.add((container, RDF.first, item)) graph.add((container, RDF.rest, RDF.nil)) def clear(self): container = self.uri graph = self.graph while container: rest = graph.value(container, RDF.rest) graph.remove((container, RDF.first, None)) graph.remove((container, RDF.rest, None)) container = rest def test(): import doctest doctest.testmod() if __name__=="__main__": test() g = Graph() c = Collection(g, BNode()) assert len(c)==0 c = Collection(g, BNode(), [Literal("1"), 
Literal("2"), Literal("3"), Literal("4")]) assert len(c)==4 assert c[1]==Literal("2"), c[1] del c[1] assert list(c)==[Literal("1"), Literal("3"), Literal("4")], list(c) try: del c[500] except IndexError, i: pass c.append(Literal("5")) print list(c) for i in c: print i del c[3] c.clear() assert len(c)==0 rdflib-2.4.2/rdflib/Graph.py0000644000175000017500000013073711153616035014614 0ustar nachonachofrom __future__ import generators __doc__=""" Instanciating Graphs with default store (IOMemory) and default identifier (a BNode): >>> g=Graph() >>> g.store.__class__ >>> g.identifier.__class__ Instanciating Graphs with a specific kind of store (IOMemory) and a default identifier (a BNode): Other store kinds: Sleepycat, MySQL, ZODB, SQLite >>> store = plugin.get('IOMemory',Store)() >>> store.__class__.__name__ 'IOMemory' >>> graph = Graph(store) >>> graph.store.__class__ Instanciating Graphs with Sleepycat store and an identifier - : >>> g=Graph('Sleepycat',URIRef("http://rdflib.net")) >>> g.identifier rdflib.URIRef('http://rdflib.net') >>> str(g) " a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label 'Sleepycat']." Creating a ConjunctiveGraph - The top level container for all named Graphs in a 'database': >>> g=ConjunctiveGraph() >>> str(g.default_context) "[a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label 'IOMemory']]." Adding / removing reified triples to Graph and iterating over it directly or via triple pattern: >>> g=Graph('IOMemory') >>> statementId = BNode() >>> print len(g) 0 >>> g.add((statementId,RDF.type,RDF.Statement)) >>> g.add((statementId,RDF.subject,URIRef('http://rdflib.net/store/ConjunctiveGraph'))) >>> g.add((statementId,RDF.predicate,RDFS.label)) >>> g.add((statementId,RDF.object,Literal("Conjunctive Graph"))) >>> print len(g) 4 >>> for s,p,o in g: print type(s) ... >>> for s,p,o in g.triples((None,RDF.object,None)): print o ... 
Conjunctive Graph >>> g.remove((statementId,RDF.type,RDF.Statement)) >>> print len(g) 3 None terms in calls to triple can be thought of as 'open variables' Graph Aggregation - ConjunctiveGraphs and ReadOnlyGraphAggregate within the same store: >>> store = plugin.get('IOMemory',Store)() >>> g1 = Graph(store) >>> g2 = Graph(store) >>> g3 = Graph(store) >>> stmt1 = BNode() >>> stmt2 = BNode() >>> stmt3 = BNode() >>> g1.add((stmt1,RDF.type,RDF.Statement)) >>> g1.add((stmt1,RDF.subject,URIRef('http://rdflib.net/store/ConjunctiveGraph'))) >>> g1.add((stmt1,RDF.predicate,RDFS.label)) >>> g1.add((stmt1,RDF.object,Literal("Conjunctive Graph"))) >>> g2.add((stmt2,RDF.type,RDF.Statement)) >>> g2.add((stmt2,RDF.subject,URIRef('http://rdflib.net/store/ConjunctiveGraph'))) >>> g2.add((stmt2,RDF.predicate,RDF.type)) >>> g2.add((stmt2,RDF.object,RDFS.Class)) >>> g3.add((stmt3,RDF.type,RDF.Statement)) >>> g3.add((stmt3,RDF.subject,URIRef('http://rdflib.net/store/ConjunctiveGraph'))) >>> g3.add((stmt3,RDF.predicate,RDFS.comment)) >>> g3.add((stmt3,RDF.object,Literal("The top-level aggregate graph - The sum of all named graphs within a Store"))) >>> len(list(ConjunctiveGraph(store).subjects(RDF.type,RDF.Statement))) 3 >>> len(list(ReadOnlyGraphAggregate([g1,g2]).subjects(RDF.type,RDF.Statement))) 2 ConjunctiveGraphs have a 'quads' method which returns quads instead of triples, where the fourth item is the Graph (or subclass thereof) instance in which the triple was asserted: >>> from sets import Set >>> uniqueGraphNames = Set([graph.identifier for s,p,o,graph in ConjunctiveGraph(store).quads((None,RDF.predicate,None))]) >>> len(uniqueGraphNames) 3 >>> unionGraph = ReadOnlyGraphAggregate([g1,g2]) >>> uniqueGraphNames = Set([graph.identifier for s,p,o,graph in unionGraph.quads((None,RDF.predicate,None))]) >>> len(uniqueGraphNames) 2 Parsing N3 from StringIO >>> g2=Graph() >>> src = \"\"\" ... @prefix rdf: . ... @prefix rdfs: . ... [ a rdf:Statement ; ... rdf:subject ; ... 
rdf:predicate rdfs:label; ... rdf:object "Conjunctive Graph" ] \"\"\" >>> g2=g2.parse(StringIO(src),format='n3') >>> print len(g2) 4 Using Namespace class: >>> RDFLib = Namespace('http://rdflib.net') >>> RDFLib.ConjunctiveGraph rdflib.URIRef('http://rdflib.netConjunctiveGraph') >>> RDFLib['Graph'] rdflib.URIRef('http://rdflib.netGraph') SPARQL Queries >>> print len(g) 3 >>> q = \'\'\' ... PREFIX rdf: SELECT ?pred WHERE { ?stmt rdf:predicate ?pred. } ... \'\'\' >>> for pred in g.query(q): print pred (rdflib.URIRef('http://www.w3.org/2000/01/rdf-schema#label'),) SPARQL Queries with namespace bindings as argument >>> nsMap = {u"rdf":RDF.RDFNS} >>> for pred in g.query("SELECT ?pred WHERE { ?stmt rdf:predicate ?pred. }", initNs=nsMap): print pred (rdflib.URIRef('http://www.w3.org/2000/01/rdf-schema#label'),) Parameterized SPARQL Queries >>> top = { Variable("?term") : RDF.predicate } >>> for pred in g.query("SELECT ?pred WHERE { ?stmt ?term ?pred. }", initBindings=top): print pred (rdflib.URIRef('http://www.w3.org/2000/01/rdf-schema#label'),) """ from cStringIO import StringIO from rdflib import URIRef, BNode, Namespace, Literal, Variable from rdflib import RDF, RDFS from rdflib.Node import Node from rdflib import plugin, exceptions from rdflib.store import Store from rdflib.syntax.serializer import Serializer from rdflib.syntax.parsers import Parser from rdflib.syntax.NamespaceManager import NamespaceManager from rdflib import sparql from rdflib.QueryResult import QueryResult from rdflib.URLInputSource import URLInputSource from xml.sax.xmlreader import InputSource from xml.sax.saxutils import prepare_input_source import logging _logger = logging.getLogger("rdflib.Graph") #import md5 import random import warnings try: from hashlib import md5 except ImportError: from md5 import md5 def describe(terms,bindings,graph): """ Default DESCRIBE returns all incomming and outgoing statements about the given terms """ from rdflib.sparql.sparqlOperators import getValue g=Graph() 
terms=[getValue(i)(bindings) for i in terms] for s,p,o in graph.triples_choices((terms,None,None)): g.add((s,p,o)) for s,p,o in graph.triples_choices((None,None,terms)): g.add((s,p,o)) return g class Graph(Node): """An RDF Graph The constructor accepts one argument, the 'store' that will be used to store the graph data (see the 'store' package for stores currently shipped with rdflib). Stores can be context-aware or unaware. Unaware stores take up (some) less space but cannot support features that require context, such as true merging/demerging of sub-graphs and provenance. The Graph constructor can take an identifier which identifies the Graph by name. If none is given, the graph is assigned a BNode for it's identifier. For more on named graphs, see: http://www.w3.org/2004/03/trix/ Ontology for __str__ provenance terms: @prefix rdf: . @prefix rdfs: . @prefix : . @prefix rdfg: . @prefix owl: . @prefix log: . @prefix xsd: . :Store a owl:Class; rdfs:subClassOf ; rdfs:subClassOf [a owl:Restriction; owl:onProperty rdfs:label; owl:allValuesFrom [a owl:DataRange; owl:oneOf ("IOMemory" "Sleepcat" "MySQL" "Redland" "REGEXMatching" "ZODB" "AuditableStorage" "Memory")] ]. :ConjunctiveGraph a owl:Class; rdfs:subClassOf rdfg:Graph; rdfs:label "The top-level graph within the store - the union of all the Graphs within." rdfs:seeAlso . :DefaultGraph a owl:Class; rdfs:subClassOf rdfg:Graph; rdfs:label "The 'default' subgraph of a conjunctive graph". :identifier a owl:Datatypeproperty; rdfs:label "The store-associated identifier of the formula. ". rdfs:domain log:Formula rdfs:range xsd:anyURI; :storage a owl:ObjectProperty; rdfs:domain [ a owl:Class; owl:unionOf (log:Formula rdfg:Graph :ConjunctiveGraph) ]; rdfs:range :Store. :default_context a owl:FunctionalProperty; rdfs:label "The default context for a conjunctive graph"; rdfs:domain :ConjunctiveGraph; rdfs:range :DefaultGraph. {?cg a :ConjunctiveGraph;:storage ?store} => {?cg owl:sameAs ?store}. 
{?subGraph rdfg:subGraphOf ?cg;a :DefaultGraph} => {?cg a :ConjunctiveGraph;:default_context ?subGraphOf} . """ def __init__(self, store='default', identifier=None, namespace_manager=None): super(Graph, self).__init__() self.__identifier = identifier or BNode() if not isinstance(store, Store): # TODO: error handling self.__store = store = plugin.get(store, Store)() else: self.__store = store self.__namespace_manager = namespace_manager self.context_aware = False self.formula_aware = False def __get_store(self): return self.__store store = property(__get_store) def __get_identifier(self): return self.__identifier identifier = property(__get_identifier) def _get_namespace_manager(self): if self.__namespace_manager is None: self.__namespace_manager = NamespaceManager(self) return self.__namespace_manager def _set_namespace_manager(self, nm): self.__namespace_manager = nm namespace_manager = property(_get_namespace_manager, _set_namespace_manager) def __repr__(self): return "" % (self.identifier, type(self)) def __str__(self): if isinstance(self.identifier,URIRef): return "%s a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label '%s']."%(self.identifier.n3(),self.store.__class__.__name__) else: return "[a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label '%s']]."%(self.store.__class__.__name__) def destroy(self, configuration): """Destroy the store identified by `configuration` if supported""" self.__store.destroy(configuration) #Transactional interfaces (optional) def commit(self): """Commits active transactions""" self.__store.commit() def rollback(self): """Rollback active transactions""" self.__store.rollback() def open(self, configuration, create=False): """Open the graph store Might be necessary for stores that require opening a connection to a database or acquiring some resource. 
""" return self.__store.open(configuration, create) def close(self, commit_pending_transaction=False): """Close the graph store Might be necessary for stores that require closing a connection to a database or releasing some resource. """ self.__store.close(commit_pending_transaction=commit_pending_transaction) def add(self, (s, p, o)): """Add a triple with self as context""" self.__store.add((s, p, o), self, quoted=False) def addN(self, quads): """Add a sequence of triple with context""" self.__store.addN([(s, p, o, c) for s, p, o, c in quads if isinstance(c, Graph) and c.identifier is self.identifier]) def remove(self, (s, p, o)): """Remove a triple from the graph If the triple does not provide a context attribute, removes the triple from all contexts. """ self.__store.remove((s, p, o), context=self) def triples(self, (s, p, o)): """Generator over the triple store Returns triples that match the given triple pattern. If triple pattern does not provide a context, all contexts will be searched. """ for (s, p, o), cg in self.__store.triples((s, p, o), context=self): yield (s, p, o) def __len__(self): """Returns the number of triples in the graph If context is specified then the number of triples in the context is returned instead. """ return self.__store.__len__(context=self) def __iter__(self): """Iterates over all triples in the store""" return self.triples((None, None, None)) def __contains__(self, triple): """Support for 'triple in graph' syntax""" for triple in self.triples(triple): return 1 return 0 def __hash__(self): return hash(self.identifier) def md5_term_hash(self): d = md5(str(self.identifier)) d.update("G") return d.hexdigest() def __cmp__(self, other): if other is None: return -1 elif isinstance(other, Graph): return cmp(self.identifier, other.identifier) else: #Note if None is considered equivalent to owl:Nothing #Then perhaps a graph with length 0 should be considered #equivalent to None (if compared to it)? 
return 1 def __iadd__(self, other): """Add all triples in Graph other to Graph""" for triple in other: self.add(triple) return self def __isub__(self, other): """Subtract all triples in Graph other from Graph""" for triple in other: self.remove(triple) return self def __add__(self,other) : """Set theoretical union""" retval = Graph() for x in self.graph: retval.add(x) for y in other.graph: retval.add(y) return retval def __mul__(self,other) : """Set theoretical intersection""" retval = Graph() for x in other.graph: if x in self.graph: retval.add(x) return retval def __sub__(self,other) : """Set theoretical difference""" retval = Graph() for x in self.graph: if not x in other.graph : retval.add(x) return retval # Conv. methods def set(self, (subject, predicate, object)): """Convenience method to update the value of object Remove any existing triples for subject and predicate before adding (subject, predicate, object). """ self.remove((subject, predicate, None)) self.add((subject, predicate, object)) def subjects(self, predicate=None, object=None): """A generator of subjects with the given predicate and object""" for s, p, o in self.triples((None, predicate, object)): yield s def predicates(self, subject=None, object=None): """A generator of predicates with the given subject and object""" for s, p, o in self.triples((subject, None, object)): yield p def objects(self, subject=None, predicate=None): """A generator of objects with the given subject and predicate""" for s, p, o in self.triples((subject, predicate, None)): yield o def subject_predicates(self, object=None): """A generator of (subject, predicate) tuples for the given object""" for s, p, o in self.triples((None, None, object)): yield s, p def subject_objects(self, predicate=None): """A generator of (subject, object) tuples for the given predicate""" for s, p, o in self.triples((None, predicate, None)): yield s, o def predicate_objects(self, subject=None): """A generator of (predicate, object) tuples for the 
given subject""" for s, p, o in self.triples((subject, None, None)): yield p, o def triples_choices(self, (subject, predicate, object_),context=None): for (s, p, o), cg in self.store.triples_choices( (subject, predicate, object_), context=self): yield (s, p, o) def value(self, subject=None, predicate=RDF.value, object=None, default=None, any=True): """Get a value for a pair of two criteria Exactly one of subject, predicate, object must be None. Useful if one knows that there may only be one value. It is one of those situations that occur a lot, hence this 'macro' like utility Parameters: ----------- subject, predicate, object -- exactly one must be None default -- value to be returned if no values found any -- if True: return any value in the case there is more than one else: raise UniquenessError """ retval = default if (subject is None and predicate is None) or \ (subject is None and object is None) or \ (predicate is None and object is None): return None if object is None: values = self.objects(subject, predicate) if subject is None: values = self.subjects(predicate, object) if predicate is None: values = self.predicates(subject, object) try: retval = values.next() except StopIteration, e: retval = default else: if any is False: try: next = values.next() msg = ("While trying to find a value for (%s, %s, %s) the " "following multiple values where found:\n" % (subject, predicate, object)) triples = self.store.triples((subject, predicate, object), None) for (s, p, o), contexts in triples: msg += "(%s, %s, %s)\n (contexts: %s)\n" % ( s, p, o, list(contexts)) raise exceptions.UniquenessError(msg) except StopIteration, e: pass return retval def label(self, subject, default=''): """Query for the RDFS.label of the subject Return default if no label exists """ if subject is None: return default return self.value(subject, RDFS.label, default=default, any=True) def comment(self, subject, default=''): """Query for the RDFS.comment of the subject Return default if no comment 
exists """ if subject is None: return default return self.value(subject, RDFS.comment, default=default, any=True) def items(self, list): """Generator over all items in the resource specified by list list is an RDF collection. """ while list: item = self.value(list, RDF.first) if item: yield item list = self.value(list, RDF.rest) def transitiveClosure(self,func,arg): """ Generates transitive closure of a user-defined function against the graph >>> from rdflib.Collection import Collection >>> g=Graph() >>> a=BNode('foo') >>> b=BNode('bar') >>> c=BNode('baz') >>> g.add((a,RDF.first,RDF.type)) >>> g.add((a,RDF.rest,b)) >>> g.add((b,RDF.first,RDFS.label)) >>> g.add((b,RDF.rest,c)) >>> g.add((c,RDF.first,RDFS.comment)) >>> g.add((c,RDF.rest,RDF.nil)) >>> def topList(node,g): ... for s in g.subjects(RDF.rest,node): ... yield s >>> def reverseList(node,g): ... for f in g.objects(node,RDF.first): ... print f ... for s in g.subjects(RDF.rest,node): ... yield s >>> [rt for rt in g.transitiveClosure(topList,RDF.nil)] [rdflib.BNode('baz'), rdflib.BNode('bar'), rdflib.BNode('foo')] >>> [rt for rt in g.transitiveClosure(reverseList,RDF.nil)] http://www.w3.org/2000/01/rdf-schema#comment http://www.w3.org/2000/01/rdf-schema#label http://www.w3.org/1999/02/22-rdf-syntax-ns#type [rdflib.BNode('baz'), rdflib.BNode('bar'), rdflib.BNode('foo')] """ for rt in func(arg,self): yield rt for rt_2 in self.transitiveClosure(func,rt): yield rt_2 def transitive_objects(self, subject, property, remember=None): """Transitively generate objects for the `property` relationship Generated objects belong to the depth first transitive closure of the `property` relationship starting at `subject`. 
""" if remember is None: remember = {} if subject in remember: return remember[subject] = 1 yield subject for object in self.objects(subject, property): for o in self.transitive_objects(object, property, remember): yield o def transitive_subjects(self, predicate, object, remember=None): """Transitively generate objects for the `property` relationship Generated objects belong to the depth first transitive closure of the `property` relationship starting at `subject`. """ if remember is None: remember = {} if object in remember: return remember[object] = 1 yield object for subject in self.subjects(predicate, object): for s in self.transitive_subjects(predicate, subject, remember): yield s def seq(self, subject): """Check if subject is an rdf:Seq If yes, it returns a Seq class instance, None otherwise. """ if (subject, RDF.type, RDF.Seq) in self: return Seq(self, subject) else: return None def qname(self, uri): return self.namespace_manager.qname(uri) def compute_qname(self, uri): return self.namespace_manager.compute_qname(uri) def bind(self, prefix, namespace, override=True): """Bind prefix to namespace If override is True will bind namespace to given prefix if namespace was already bound to a different prefix. """ return self.namespace_manager.bind(prefix, namespace, override=override) def namespaces(self): """Generator over all the prefix, namespace tuples""" for prefix, namespace in self.namespace_manager.namespaces(): yield prefix, namespace def absolutize(self, uri, defrag=1): """Turn uri into an absolute URI if it's not one already""" return self.namespace_manager.absolutize(uri, defrag) def serialize(self, destination=None, format="xml", base=None, encoding=None, **args): """Serialize the Graph to destination If destination is None serialize method returns the serialization as a string. Format defaults to xml (AKA rdf/xml). 
""" serializer = plugin.get(format, Serializer)(self) return serializer.serialize(destination, base=base, encoding=encoding, **args) def prepare_input_source(self, source, publicID=None): if isinstance(source, InputSource): input_source = source else: if hasattr(source, "read") and not isinstance(source, Namespace): # we need to make sure it's not an instance of Namespace since # Namespace instances have a read attr input_source = prepare_input_source(source) else: location = self.absolutize(source) input_source = URLInputSource(location) publicID = publicID or location if publicID: input_source.setPublicId(publicID) id = input_source.getPublicId() if id is None: #_logger.warning("no publicID set for source. Using '' for publicID.") input_source.setPublicId("") return input_source def parse(self, source, publicID=None, format="xml", **args): """ Parse source into Graph If Graph is context-aware it'll get loaded into it's own context (sub graph). Format defaults to xml (AKA rdf/xml). The publicID argument is for specifying the logical URI for the case that it's different from the physical source URI. Returns the context into which the source was parsed. 
""" source = self.prepare_input_source(source, publicID) parser = plugin.get(format, Parser)() parser.parse(source, self, **args) return self def load(self, source, publicID=None, format="xml"): self.parse(source, publicID, format) def query(self, strOrQuery, initBindings={}, initNs={}, DEBUG=False, dataSetBase=None, processor="sparql", extensionFunctions={sparql.DESCRIBE:describe}): """ Executes a SPARQL query (eventually will support Versa queries with same method) against this Graph strOrQuery - Is either a string consisting of the SPARQL query or an instance of rdflib.sparql.bison.Query.Query initBindings - A mapping from a Variable to an RDFLib term (used as initial bindings for SPARQL query) initNS - A mapping from a namespace prefix to an instance of rdflib.Namespace (used for SPARQL query) DEBUG - A boolean flag passed on to the SPARQL parser and evaluation engine processor - The kind of RDF query (must be 'sparql' until Versa is ported) """ assert processor == 'sparql',"SPARQL is currently the only supported RDF query language" p = plugin.get(processor, sparql.Processor)(self) return plugin.get('SPARQLQueryResult',QueryResult)(p.query(strOrQuery, initBindings, initNs, DEBUG, dataSetBase, extensionFunctions)) processor_plugin = plugin.get(processor, sparql.Processor)(self.store) qresult_plugin = plugin.get('SPARQLQueryResult', QueryResult) res = processor_plugin.query(strOrQuery, initBindings, initNs, DEBUG, extensionFunctions=extensionFunctions) return qresult_plugin(res) def n3(self): """return an n3 identifier for the Graph""" return "[%s]" % self.identifier.n3() def __reduce__(self): return (Graph, (self.store, self.identifier,)) def isomorphic(self, other): # TODO: this is only an approximation. 
if len(self) != len(other): return False for s, p, o in self: if not isinstance(s, BNode) and not isinstance(o, BNode): if not (s, p, o) in other: return False for s, p, o in other: if not isinstance(s, BNode) and not isinstance(o, BNode): if not (s, p, o) in self: return False # TODO: very well could be a false positive at this point yet. return True def connected(self): """Check if the Graph is connected The Graph is considered undirectional. Performs a search on the Graph, starting from a random node. Then iteratively goes depth-first through the triplets where the node is subject and object. Return True if all nodes have been visited and False if it cannot continue and there are still unvisited nodes left. """ all_nodes = list(self.all_nodes()) discovered = [] # take a random one, could also always take the first one, doesn't # really matter. visiting = [all_nodes[random.randrange(len(all_nodes))]] while visiting: x = visiting.pop() if x not in discovered: discovered.append(x) for new_x in self.objects(subject=x): if new_x not in discovered and new_x not in visiting: visiting.append(new_x) for new_x in self.subjects(object=x): if new_x not in discovered and new_x not in visiting: visiting.append(new_x) # optimisation by only considering length, since no new objects can # be introduced anywhere. 
if len(all_nodes) == len(discovered): return True else: return False def all_nodes(self): obj = set(self.objects()) allNodes = obj.union(set(self.subjects())) return allNodes class ConjunctiveGraph(Graph): def __init__(self, store='default', identifier=None): super(ConjunctiveGraph, self).__init__(store) assert self.store.context_aware, ("ConjunctiveGraph must be backed by" " a context aware store.") self.context_aware = True self.default_context = Graph(store=self.store, identifier=identifier or BNode()) def __str__(self): pattern = ("[a rdflib:ConjunctiveGraph;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']]") return pattern % self.store.__class__.__name__ def add(self, (s, p, o)): """Add the triple to the default context""" self.store.add((s, p, o), context=self.default_context, quoted=False) def addN(self, quads): """Add a sequence of triple with context""" self.store.addN(quads) def remove(self, (s, p, o)): """Removes from all its contexts""" self.store.remove((s, p, o), context=None) def triples(self, (s, p, o)): """Iterate over all the triples in the entire conjunctive graph""" for (s, p, o), cg in self.store.triples((s, p, o), context=None): yield s, p, o def quads(self,(s,p,o)): """Iterate over all the quads in the entire conjunctive graph""" for (s, p, o), cg in self.store.triples((s, p, o), context=None): for ctx in cg: yield s, p, o, ctx def triples_choices(self, (s, p, o)): """Iterate over all the triples in the entire conjunctive graph""" for (s1, p1, o1), cg in self.store.triples_choices((s, p, o), context=None): yield (s1, p1, o1) def __len__(self): """Number of triples in the entire conjunctive graph""" return self.store.__len__() def contexts(self, triple=None): """Iterate over all contexts in the graph If triple is specified, iterate over all contexts the triple is in. 
""" for context in self.store.contexts(triple): yield context def remove_context(self, context): """Removes the given context from the graph""" self.store.remove((None, None, None), context) def context_id(self, uri, context_id=None): """URI#context""" uri = uri.split("#", 1)[0] if context_id is None: context_id = "#context" return URIRef(context_id, base=uri) def parse(self, source, publicID=None, format="xml", **args): """Parse source into Graph into it's own context (sub graph) Format defaults to xml (AKA rdf/xml). The publicID argument is for specifying the logical URI for the case that it's different from the physical source URI. Returns the context into which the source was parsed. In the case of n3 it returns the root context. """ source = self.prepare_input_source(source, publicID) id = self.context_id(self.absolutize(source.getPublicId())) context = Graph(store=self.store, identifier=id) context.remove((None, None, None)) context.parse(source, publicID=publicID, format=format, **args) return context def __reduce__(self): return (ConjunctiveGraph, (self.store, self.identifier)) class QuotedGraph(Graph): def __init__(self, store, identifier): super(QuotedGraph, self).__init__(store, identifier) def add(self, triple): """Add a triple with self as context""" self.store.add(triple, self, quoted=True) def addN(self,quads): """Add a sequence of triple with context""" self.store.addN([(s,p,o,c) for s,p,o,c in quads if isinstance(c, QuotedGraph) and c.identifier is self.identifier]) def n3(self): """Return an n3 identifier for the Graph""" return "{%s}" % self.identifier.n3() def __str__(self): identifier = self.identifier.n3() label = self.store.__class__.__name__ pattern = ("{this rdflib.identifier %s;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']}") return pattern % (identifier, label) def __reduce__(self): return (QuotedGraph, (self.store, self.identifier)) class GraphValue(QuotedGraph): def __init__(self, store, identifier=None, graph=None): if graph is 
not None: assert identifier is None np = store.node_pickler identifier = md5() s = list(graph.triples((None, None, None))) s.sort() for t in s: identifier.update("^".join((np.dumps(i) for i in t))) identifier = URIRef("data:%s" % identifier.hexdigest()) super(GraphValue, self).__init__(store, identifier) for t in graph: store.add(t, context=self) else: super(GraphValue, self).__init__(store, identifier) def add(self, triple): raise Exception("not mutable") def remove(self, triple): raise Exception("not mutable") def __reduce__(self): return (GraphValue, (self.store, self.identifier,)) class Seq(object): """Wrapper around an RDF Seq resource It implements a container type in Python with the order of the items returned corresponding to the Seq content. It is based on the natural ordering of the predicate names _1, _2, _3, etc, which is the 'implementation' of a sequence in RDF terms. """ def __init__(self, graph, subject): """Parameters: - graph: the graph containing the Seq - subject: the subject of a Seq. Note that the init does not check whether this is a Seq, this is done in whoever creates this instance! """ _list = self._list = list() LI_INDEX = RDF.RDFNS["_"] for (p, o) in graph.predicate_objects(subject): if p.startswith(LI_INDEX): #!= RDF.Seq: # i = int(p.replace(LI_INDEX, '')) _list.append((i, o)) # here is the trick: the predicates are _1, _2, _3, etc. Ie, # by sorting the keys (by integer) we have what we want! _list.sort() def __iter__(self): """Generator over the items in the Seq""" for _, item in self._list: yield item def __len__(self): """Length of the Seq""" return len(self._list) def __getitem__(self, index): """Item given by index from the Seq""" index, item = self._list.__getitem__(index) return item class BackwardCompatGraph(ConjunctiveGraph): def __init__(self, backend='default'): warnings.warn("Use ConjunctiveGraph instead. 
" "( from rdflib.Graph import ConjunctiveGraph )", DeprecationWarning, stacklevel=2) super(BackwardCompatGraph, self).__init__(store=backend) def __get_backend(self): return self.store backend = property(__get_backend) def open(self, configuration, create=True): return ConjunctiveGraph.open(self, configuration, create) def add(self, (s, p, o), context=None): """Add to to the given context or to the default context""" if context is not None: c = self.get_context(context) assert c.identifier == context, "%s != %s" % (c.identifier, context) else: c = self.default_context self.store.add((s, p, o), context=c, quoted=False) def remove(self, (s, p, o), context=None): """Remove from the given context or from the default context""" if context is not None: context = self.get_context(context) self.store.remove((s, p, o), context) def triples(self, (s, p, o), context=None): """Iterate over all the triples in the entire graph""" if context is not None: c = self.get_context(context) assert c.identifier == context else: c = None for (s, p, o), cg in self.store.triples((s, p, o), c): yield (s, p, o) def __len__(self, context=None): """Number of triples in the entire graph""" if context is not None: context = self.get_context(context) return self.store.__len__(context) def get_context(self, identifier, quoted=False): """Return a context graph for the given identifier identifier must be a URIRef or BNode. 
""" assert isinstance(identifier, URIRef) or \ isinstance(identifier, BNode), type(identifier) if quoted: assert False return QuotedGraph(self.store, identifier) #return QuotedGraph(self.store, Graph(store=self.store, # identifier=identifier)) else: return Graph(store=self.store, identifier=identifier, namespace_manager=self) #return Graph(self.store, Graph(store=self.store, # identifier=identifier)) def remove_context(self, context): """Remove the given context from the graph""" self.store.remove((None, None, None), self.get_context(context)) def contexts(self, triple=None): """Iterate over all contexts in the graph If triple is specified, iterate over all contexts the triple is in. """ for context in self.store.contexts(triple): yield context.identifier def subjects(self, predicate=None, object=None, context=None): """Generate subjects with the given predicate and object""" for s, p, o in self.triples((None, predicate, object), context): yield s def predicates(self, subject=None, object=None, context=None): """Generate predicates with the given subject and object""" for s, p, o in self.triples((subject, None, object), context): yield p def objects(self, subject=None, predicate=None, context=None): """Generate objects with the given subject and predicate""" for s, p, o in self.triples((subject, predicate, None), context): yield o def subject_predicates(self, object=None, context=None): """Generate (subject, predicate) tuples for the given object""" for s, p, o in self.triples((None, None, object), context): yield s, p def subject_objects(self, predicate=None, context=None): """Generate (subject, object) tuples for the given predicate""" for s, p, o in self.triples((None, predicate, None), context): yield s, o def predicate_objects(self, subject=None, context=None): """Generate (predicate, object) tuples for the given subject""" for s, p, o in self.triples((subject, None, None), context): yield p, o def __reduce__(self): return (BackwardCompatGraph, (self.store, 
self.identifier)) def save(self, destination, format="xml", base=None, encoding=None): warnings.warn("Use serialize method instead. ", DeprecationWarning, stacklevel=2) self.serialize(destination=destination, format=format, base=base, encoding=encoding) class ModificationException(Exception): def __init__(self): pass def __str__(self): return ("Modifications and transactional operations not allowed on " "ReadOnlyGraphAggregate instances") class UnSupportedAggregateOperation(Exception): def __init__(self): pass def __str__(self): return ("This operation is not supported by ReadOnlyGraphAggregate " "instances") class ReadOnlyGraphAggregate(ConjunctiveGraph): """Utility class for treating a set of graphs as a single graph Only read operations are supported (hence the name). Essentially a ConjunctiveGraph over an explicit subset of the entire store. """ def __init__(self, graphs,store='default'): if store is not None: super(ReadOnlyGraphAggregate, self).__init__(store) assert isinstance(graphs, list) and graphs\ and [g for g in graphs if isinstance(g, Graph)],\ "graphs argument must be a list of Graphs!!" self.graphs = graphs def __repr__(self): return "" % len(self.graphs) def destroy(self, configuration): raise ModificationException() #Transactional interfaces (optional) def commit(self): raise ModificationException() def rollback(self): raise ModificationException() def open(self, configuration, create=False): # TODO: is there a use case for this method? 
for graph in self.graphs: graph.open(self, configuration, create) def close(self): for graph in self.graphs: graph.close() def add(self, (s, p, o)): raise ModificationException() def addN(self, quads): raise ModificationException() def remove(self, (s, p, o)): raise ModificationException() def triples(self, (s, p, o)): for graph in self.graphs: for s1, p1, o1 in graph.triples((s, p, o)): yield (s1, p1, o1) def quads(self,(s,p,o)): """Iterate over all the quads in the entire aggregate graph""" for graph in self.graphs: for s1, p1, o1 in graph.triples((s, p, o)): yield (s1, p1, o1, graph) def __len__(self): return reduce(lambda x, y: x + y, [len(g) for g in self.graphs]) def __hash__(self): raise UnSupportedAggregateOperation() def __cmp__(self, other): if other is None: return -1 elif isinstance(other, Graph): return -1 elif isinstance(other, ReadOnlyGraphAggregate): return cmp(self.graphs, other.graphs) else: return -1 def __iadd__(self, other): raise ModificationException() def __isub__(self, other): raise ModificationException() # Conv. 
methods def triples_choices(self, (subject, predicate, object_), context=None): for graph in self.graphs: choices = graph.triples_choices((subject, predicate, object_)) for (s, p, o) in choices: yield (s, p, o) def qname(self, uri): raise UnSupportedAggregateOperation() def compute_qname(self, uri): raise UnSupportedAggregateOperation() def bind(self, prefix, namespace, override=True): raise UnSupportedAggregateOperation() def namespaces(self): if hasattr(self,'namespace_manager'): for prefix, namespace in self.namespace_manager.namespaces(): yield prefix, namespace else: for graph in self.graphs: for prefix, namespace in graph.namespaces(): yield prefix, namespace def absolutize(self, uri, defrag=1): raise UnSupportedAggregateOperation() def parse(self, source, publicID=None, format="xml", **args): raise ModificationException() def n3(self): raise UnSupportedAggregateOperation() def __reduce__(self): raise UnSupportedAggregateOperation() def test(): import doctest doctest.testmod() if __name__ == '__main__': test() rdflib-2.4.2/rdflib/__init__.py0000644000175000017500000000212011204354317015271 0ustar nachonacho# RDF Library __version__ = "2.4.2" __date__ = "2009/05/18" import sys # generator expressions require 2.4 assert sys.version_info >= (2,4,0), "rdflib requires Python 2.4 or higher" del sys import logging _logger = logging.getLogger("rdflib") _logger.info("version: %s" % __version__) from rdflib.URIRef import URIRef from rdflib.BNode import BNode from rdflib.Literal import Literal from rdflib.Variable import Variable from rdflib.Namespace import Namespace from rdflib import RDF from rdflib import RDFS # from rdflib.Graph import Graph # perhaps in 3.0, but for 2.x we # don't want to break compatibility. 
from rdflib.Graph import BackwardCompatGraph as Graph from rdflib.Graph import ConjunctiveGraph from rdflib.FileInputSource import FileInputSource from rdflib.URLInputSource import URLInputSource from rdflib.StringInputSource import StringInputSource # if zope.interface is not installed, these calls do nothing from rdflib.interfaces import IIdentifier, classImplements classImplements(URIRef, IIdentifier) classImplements(BNode, IIdentifier) classImplements(Literal, IIdentifier) rdflib-2.4.2/rdflib/URLInputSource.py0000644000175000017500000000135311153616035016405 0ustar nachonachofrom urllib2 import urlopen, Request from xml.sax.xmlreader import InputSource from rdflib import __version__ # TODO: add types for n3. text/rdf+n3 ? headers = { 'Accept': 'application/rdf+xml,application/xhtml+xml;q=0.5', 'User-agent': 'rdflib-%s (http://rdflib.net/; eikeon@eikeon.com)' % __version__ } class URLInputSource(InputSource, object): def __init__(self, system_id=None): super(URLInputSource, self).__init__(system_id) self.url = system_id # So that we send the headers we want to... 
req = Request(system_id, None, headers) file = urlopen(req) self.setByteStream(file) # TODO: self.setEncoding(encoding) def __repr__(self): return self.url rdflib-2.4.2/rdflib/TextIndex.py0000644000175000017500000002750311153616035015463 0ustar nachonachotry: from hashlib import md5 except ImportError: from md5 import md5 from rdflib.BNode import BNode from rdflib.Graph import ConjunctiveGraph from rdflib.Literal import Literal from rdflib.Namespace import NamespaceDict as Namespace from rdflib.URIRef import URIRef from rdflib.store import TripleAddedEvent, TripleRemovedEvent from rdflib.store.IOMemory import IOMemory import logging import re #, stopdict _logger = logging.getLogger(__name__) def get_stopdict(): """Return a dictionary of stopwords.""" return _dict _words = [ "a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with" ] _dict = {} for w in _words: _dict[w] = None word_pattern = re.compile(r"(?u)\w+") has_stop = get_stopdict().has_key def splitter(s): return word_pattern.findall(s) def stopper(s): return [w.lower() for w in s if not has_stop(w)] class TextIndex(ConjunctiveGraph): """ An rdflib graph event handler than indexes text literals that are added to a another graph. This class lets you 'search' the text literals in an RDF graph. Typically in RDF to search for a substring in an RDF graph you would have to 'brute force' search every literal string looking for your substring. Instead, this index stores the words in literals into another graph whose structure makes searching for terms much less expensive. It does this by chopping up the literals into words, removing very common words (currently only in English) and then adding each of those words into an RDF graph that describes the statements in the original graph that the word came from. 
First, let's create a graph that will transmit events and a text index that will receive those events, and then subscribe the text index to the event graph: >>> e = ConjunctiveGraph() >>> t = TextIndex() >>> t.subscribe_to(e) When triples are added to the event graph (e) events will be fired that trigger event handlers in subscribers. In this case our only subscriber is a text index and its action is to index triples that contain literal RDF objects. Here are 3 such triples: >>> e.add((URIRef('a'), URIRef('title'), Literal('one two three'))) >>> e.add((URIRef('b'), URIRef('title'), Literal('two three four'))) >>> e.add((URIRef('c'), URIRef('title'), Literal('three four five'))) Of the three literal objects that were added, they all contain five unique terms. These terms can be queried directly from the text index: >>> t.term_strings() == set(['four', 'five', 'three', 'two', 'one']) True Now we can search for statement that contain certain terms. Let's search for 'one' which occurs in only one of the literals provided, 'a'. This can be queried for: >>> t.search('one') set([(rdflib.URIRef('a'), rdflib.URIRef('title'), None)]) 'one' and 'five' only occur in one statement each, 'two' and 'four' occur in two, and 'three' occurs in three statements: >>> len(list(t.search('one'))) 1 >>> len(list(t.search('two'))) 2 >>> len(list(t.search('three'))) 3 >>> len(list(t.search('four'))) 2 >>> len(list(t.search('five'))) 1 Lets add some more statements with different predicates. 
>>> e.add((URIRef('a'), URIRef('creator'), Literal('michel'))) >>> e.add((URIRef('b'), URIRef('creator'), Literal('Atilla the one Hun'))) >>> e.add((URIRef('c'), URIRef('creator'), Literal('michel'))) >>> e.add((URIRef('d'), URIRef('creator'), Literal('Hun Mung two'))) Now 'one' occurs in two statements: >>> assert len(list(t.search('one'))) == 2 And 'two' occurs in three statements, here they are: >>> t.search('two') set([(rdflib.URIRef('d'), rdflib.URIRef('creator'), None), (rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) The predicates that are searched can be restricted by provding an argument to 'search()': >>> t.search('two', URIRef('creator')) set([(rdflib.URIRef('d'), rdflib.URIRef('creator'), None)]) >>> t.search('two', URIRef(u'title')) set([(rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) You can search for more than one term by simply including it in the query: >>> t.search('two three', URIRef(u'title')) set([(rdflib.URIRef('c'), rdflib.URIRef('title'), None), (rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) The above query returns all the statements that contain 'two' OR 'three'. For the documents that contain 'two' AND 'three', do an intersection of two queries: >>> t.search('two', URIRef(u'title')).intersection(t.search(u'three', URIRef(u'title'))) set([(rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) Intersection two queries like this is probably not the most efficient way to do it, but for reasonable data sets this isn't a problem. Larger data sets will want to query the graph with sparql or something else more efficient. In all the above queries, the object of each statement was always 'None'. 
This is because the index graph does not store the object data, that would make it very large, and besides the data is available in the original data graph. For convenience, a method is provides to 'link' an index graph to a data graph. This allows the index to also provide object data in query results. >>> t.link_to(e) >>> set([str(i[2]) for i in t.search('two', URIRef(u'title')).intersection(t.search(u'three', URIRef(u'title')))]) == set(['two three four', 'one two three']) True You can remove the link by assigning None: >>> t.link_to(None) Unindexing means to remove statments from the index graph that corespond to a statement in the data graph. Note that while it is possible to remove the index information of the occurances of terms in statements, it is not possible to remove the terms themselves, terms are 'absolute' and are never removed from the index graph. This is not a problem since languages have finite terms: >>> e.remove((URIRef('a'), URIRef('creator'), Literal('michel'))) >>> e.remove((URIRef('b'), URIRef('creator'), Literal('Atilla the one Hun'))) >>> e.remove((URIRef('c'), URIRef('creator'), Literal('michel'))) >>> e.remove((URIRef('d'), URIRef('creator'), Literal('Hun Mung two'))) Now 'one' only occurs in one statement: >>> assert len(list(t.search('one'))) == 1 And 'two' only occurs in two statements, here they are: >>> t.search('two') set([(rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) The predicates that are searched can be restricted by provding an argument to 'search()': >>> t.search('two', URIRef(u'creator')) set([]) >>> t.search('two', URIRef(u'title')) set([(rdflib.URIRef('a'), rdflib.URIRef('title'), None), (rdflib.URIRef('b'), rdflib.URIRef('title'), None)]) """ linked_data = None text_index = Namespace('http://rdflib.net/text_index#') term = Namespace('http://rdflib.net/text_index#')["term"] termin = Namespace('http://rdflib.net/text_index#')["termin"] def __init__(self, 
store='default'): super(TextIndex, self).__init__(store) def add_handler(self, event): if type(event.triple[2]) is Literal: self.index(event.triple) def remove_handler(self, event): if type(event.triple[2]) is Literal: self.unindex(event.triple) def index(self, (s, p, o)): # this code is tricky so it's annotated. unindex is the reverse of this method. if type(o) is Literal: # first, only index statements that have a literal object for word in stopper(splitter(o)): # split the literal and remove any stopwords word = Literal(word) # create a new literal for each word in the object # if that word already exists in the statement # loop over each context the term occurs in if self.value(predicate=self.term, object=word, any=True): for t in set(self.triples((None, self.term, word))): t = t[0] # if the graph does not contain an occurance of the term in the statement's subject # then add it if not (t, self.termin, s) in self: self.add((t, self.termin, s)) # ditto for the predicate if not (p, t, s) in self: self.add((p, t, s)) else: # if the term does not exist in the graph, add it, and the references to the statement. # t gets used as a predicate, create identifier accordingly (AKA can't be a BNode) h = md5(word.encode('utf-8')); h.update(s.encode('utf-8')); h.update(p.encode('utf-8')) t = self.text_index["term_%s" % h.hexdigest()] self.add((t, self.term, word)) self.add((t, self.termin, s)) self.add((p, t, s)) def unindex(self, (s, p, o)): if type(o) is Literal: for word in stopper(splitter(o)): word = Literal(word) if self.value(predicate=self.term, object=word, any=True): for t in self.triples((None, self.term, word)): t = t[0] if (t, self.termin, s) in self: self.remove((t, self.termin, s)) if (p, t, s) in self: self.remove((p, t, s)) def terms(self): """ Returns a generator that yields all of the term literals in the graph. """ return set(self.objects(None, self.term)) def term_strings(self): """ Return a list of term strings. 
""" return set([str(i) for i in self.terms()]) def search(self, terms, predicate=None): """ Returns a set of all the statements the term occurs in. """ if predicate and not isinstance(predicate, URIRef): _logger.warning("predicate is not a URIRef") predicate = URIRef(predicate) results = set() terms = [Literal(term) for term in stopper(splitter(terms))] for term in terms: for t in self.triples((None, self.term, term)): for o in self.objects(t[0], self.termin): for p in self.triples((predicate, t[0], o)): if self.linked_data is None: results.add((o, p[0], None)) else: results.add((o, p[0], self.linked_data.value(o, p[0]))) return results def index_graph(self, graph): """ Index a whole graph. Must be a conjunctive graph. """ for t in graph.triples((None,None,None)): self.index(t) def link_to(self, graph): """ Link to a graph """ self.linked_data = graph def subscribe_to(self, graph): """ Subscribe this index to a graph. """ graph.store.dispatcher.subscribe(TripleAddedEvent, self.add_handler) graph.store.dispatcher.subscribe(TripleRemovedEvent, self.remove_handler) def test(): import doctest doctest.testmod() if __name__ == '__main__': test() rdflib-2.4.2/rdflib/RDFS.py0000644000175000017500000000110411153616035014272 0ustar nachonachofrom rdflib.Namespace import Namespace RDFSNS = Namespace("http://www.w3.org/2000/01/rdf-schema#") Resource = RDFSNS["Resource"] Class = RDFSNS["Class"] subClassOf = RDFSNS["subClassOf"] subPropertyOf = RDFSNS["subPropertyOf"] comment = RDFSNS["comment"] label = RDFSNS["label"] domain = RDFSNS["domain"] range = RDFSNS["range"] seeAlso = RDFSNS["seeAlso"] isDefinedBy = RDFSNS["isDefinedBy"] Literal = RDFSNS["Literal"] Container = RDFSNS["Container"] ContainerMembershipProperty = RDFSNS["ContainerMembershipProperty"] member = RDFSNS["member"] Datatype = RDFSNS["Datatype"] rdflib-2.4.2/rdflib/plugin.py0000644000175000017500000000742111165212723015041 0ustar nachonachofrom rdflib.store import Store from rdflib.syntax import serializer, 
serializers from rdflib.syntax import parsers from rdflib import sparql from rdflib.QueryResult import QueryResult _kinds = {} _adaptors = {} def register(name, kind, module_path, class_name): _module_info = _kinds.get(kind, None) if _module_info is None: _module_info = _kinds[kind] = {} _module_info[name] = (module_path, class_name) def get(name, kind): _module_info = _kinds.get(kind) if _module_info and name in _module_info: module_path, class_name = _module_info[name] module = __import__(module_path, globals(), locals(), [""]) return getattr(module, class_name) else: Adaptor = kind # TODO: look up of adaptor, for now just use kind try: Adaptee = get(name, _adaptors[kind]) except Exception, e: raise Exception("could not get plugin for %s, %s: %s" % (name, kind, e)) def const(*args, **keywords): return Adaptor(Adaptee(*args, **keywords)) return const def register_adaptor(adaptor, adaptee): _adaptors[adaptor] = adaptee register_adaptor(serializer.Serializer, serializers.Serializer) #register_adaptor(parser.Parser, parsers.Parser) register('rdf', serializers.Serializer, 'rdflib.syntax.serializers.XMLSerializer', 'XMLSerializer') register('xml', serializers.Serializer, 'rdflib.syntax.serializers.XMLSerializer', 'XMLSerializer') register('rdf/xml', serializers.Serializer, 'rdflib.syntax.serializers.XMLSerializer', 'XMLSerializer') register('pretty-xml', serializers.Serializer, 'rdflib.syntax.serializers.PrettyXMLSerializer', 'PrettyXMLSerializer') register('nt', serializers.Serializer, 'rdflib.syntax.serializers.NTSerializer', 'NTSerializer') register('turtle', serializers.Serializer, 'rdflib.syntax.serializers.TurtleSerializer', 'TurtleSerializer') register('n3', serializers.Serializer, 'rdflib.syntax.serializers.N3Serializer', 'N3Serializer') register('xml', parsers.Parser, 'rdflib.syntax.parsers.RDFXMLParser', 'RDFXMLParser') register('trix', parsers.Parser, 'rdflib.syntax.parsers.TriXParser', 'TriXParser') register('n3', parsers.Parser, 
'rdflib.syntax.parsers.N3Parser', 'N3Parser') register('notation3', parsers.Parser, 'rdflib.syntax.parsers.N3Parser', 'N3Parser') register('nt', parsers.Parser, 'rdflib.syntax.parsers.NTParser', 'NTParser') register('n3', parsers.Parser, 'rdflib.syntax.parsers.N3Parser', 'N3Parser') register('rdfa', parsers.Parser, 'rdflib.syntax.parsers.RDFaParser', 'RDFaParser') register('default', Store, 'rdflib.store.IOMemory', 'IOMemory') register('IOMemory', Store, 'rdflib.store.IOMemory', 'IOMemory') register('Memory', Store, 'rdflib.store.Memory', 'Memory') register('Sleepycat', Store, 'rdflib.store.Sleepycat', 'Sleepycat') register('BerkeleyDB', Store, 'rdflib.store.BerkeleyDB', 'BerkeleyDB') register('BDBOptimized', Store, 'rdflib.store.BDBOptimized', 'BDBOptimized') register('PostgreSQL', Store, 'rdflib.store.PostgreSQL', 'PostgreSQL') register('MySQL', Store, 'rdflib.store.MySQL', 'MySQL') register('SQLite', Store, 'rdflib.store.SQLite', 'SQLite') register('ZODB', Store, 'rdflib.store.ZODB', 'ZODB') register('sqlobject', Store, 'rdflib.store._sqlobject', 'SQLObject') register('Redland', Store, 'rdflib.store.Redland', 'Redland') register('MySQL', Store, 'rdflib.store.MySQL', 'MySQL') register("sparql", sparql.Processor, 'rdflib.sparql.bison.Processor', 'Processor') register("SPARQLQueryResult", QueryResult, 'rdflib.sparql.QueryResult', 'SPARQLQueryResult') rdflib-2.4.2/rdflib/Node.py0000644000175000017500000000012011153616035014416 0ustar nachonachoclass Node(object): """ A Node in the Graph. """ __slots__ = () rdflib-2.4.2/rdflib_tools/0000755000175000017500000000000011204354476014413 5ustar nachonachordflib-2.4.2/rdflib_tools/README0000644000175000017500000000036611153616035015273 0ustar nachonachoSome tools built using rdflib. Eventually these will become part of a release separate from rdflib, but until such a separate release exists we'll start gathering some here. 
These rdflib_tool are being released under than same license as rdflib.rdflib-2.4.2/rdflib_tools/RDFPipe.py0000755000175000017500000000530611153616035016220 0ustar nachonacho#!/usr/bin/env python from pprint import pprint from rdflib.Namespace import Namespace from rdflib import plugin,RDF,RDFS,URIRef from rdflib.store import Store from rdflib.Graph import Graph from rdflib.syntax.NamespaceManager import NamespaceManager RDFLIB_CONNECTION='' RDFLIB_STORE='IOMemory' import getopt, sys def usage(): print """USAGE: RDFPipe.py [options] Options: --stdin Parse RDF from STDIN (useful for piping) --help --input-format Format of the input document(s). One of: 'xml','trix','n3','nt','rdfa' --output Format of the final serialized RDF graph. One of: 'n3','xml','pretty-xml','turtle',or 'nt' --ns=prefix=namespaceUri Register a namespace binding (QName prefix to a base URI). This can be used more than once""" def main(): try: opts, args = getopt.getopt(sys.argv[1:], "", ["output=","ns=","input=","stdin","help","input-format="]) except getopt.GetoptError, e: # print help information and exit: print e usage() sys.exit(2) factGraphs = [] factFormat = 'xml' useRuleFacts = False nsBinds = { 'rdf' : RDF.RDFNS, 'rdfs': RDFS.RDFSNS, 'owl' : "http://www.w3.org/2002/07/owl#", 'dc' : "http://purl.org/dc/elements/1.1/", 'foaf': "http://xmlns.com/foaf/0.1/", 'wot' : "http://xmlns.com/wot/0.1/" } outMode = 'n3' stdIn = False if not opts: usage() sys.exit() for o, a in opts: if o == '--input-format': factFormat = a elif o == '--stdin': stdIn = True elif o == '--output': outMode = a elif o == '--ns': pref,nsUri = a.split('=') nsBinds[pref]=nsUri elif o == "--input": factGraphs = a.split(',') elif o == "--help": usage() sys.exit() store = plugin.get(RDFLIB_STORE,Store)() store.open(RDFLIB_CONNECTION) namespace_manager = NamespaceManager(Graph()) for prefix,uri in nsBinds.items(): namespace_manager.bind(prefix, uri, override=False) factGraph = Graph(store) factGraph.namespace_manager = 
namespace_manager if factGraphs: for fileN in factGraphs: factGraph.parse(fileN,format=factFormat) if stdIn: factGraph.parse(sys.stdin,format=factFormat) print factGraph.serialize(destination=None, format=outMode, base=None) store.rollback() if __name__ == "__main__": main() rdflib-2.4.2/rdflib_tools/EARLPlugin.py0000644000175000017500000000444711153616035016673 0ustar nachonacho""" A Nose Plugin for EARL. See Also: http://nose.python-hosting.com/ http://www.w3.org/TR/EARL10-Schema/ """ import logging import sys from nose.plugins import Plugin from nose.suite import TestModule from rdflib import URIRef, BNode, Literal from rdflib import RDF, RDFS from rdflib.Graph import Graph from rdflib.Namespace import NamespaceDict as Namespace from rdflib.util import date_time log = logging.getLogger(__name__) EARL = Namespace("http://www.w3.org/ns/earl#") class EARLPlugin(Plugin): """ Activate the EARL plugin to generate a report of the test results using EARL. """ name = 'EARL' def begin(self): self.graph = Graph() self.graph.bind("earl", EARL.uri) def finalize(self, result): # TODO: add plugin options for specifying where to send # output. self.graph.serialize("file:results-%s.rdf" % date_time(), format="pretty-xml") def addDeprecated(self, test): print "Deprecated: %s" % test def addError(self, test, err, capt): print "Error: %s" % test def addFailure(self, test, err, capt, tb_info): print "Failure: %s" % test def addSkip(self, test): print "Skip: %s" % test def addSuccess(self, test, capt): result = BNode() # TODO: coin URIRef self.graph.add((result, RDFS.label, Literal(test))) self.graph.add((result, RDFS.comment, Literal(type(test)))) self.graph.add((result, RDF.type, EARL.TestResult)) self.graph.add((result, EARL.outcome, EARL["pass"])) # etc """ Invalid Markup (code #353)

The table element is not allowed to appear inside a p element

2006-08-13

It seems the p element has not been closed

""" rdflib-2.4.2/rdflib_tools/__init__.py0000644000175000017500000000000211153616035016507 0ustar nachonacho# rdflib-2.4.2/setup.py0000644000175000017500000000421311165212204013427 0ustar nachonachofrom setuptools import setup, find_packages from distutils.extension import Extension # Install rdflib from rdflib import __version__, __date__ setup( name = 'rdflib', version = __version__, description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information.", author = "Daniel 'eikeon' Krech", author_email = "eikeon@eikeon.com", maintainer = "Daniel 'eikeon' Krech", maintainer_email = "eikeon@eikeon.com", url = "http://rdflib.net/", license = "http://rdflib.net/latest/LICENSE", platforms = ["any"], classifiers = ["Programming Language :: Python", "License :: OSI Approved :: BSD License", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: OS Independent", "Natural Language :: English", ], long_description = \ """RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information. The library contains parsers and serializers for RDF/XML, N3, NTriples, Turtle, TriX and RDFa . The library presents a Graph interface which can be backed by any one of a number of Store implementations, including, Memory, MySQL, Redland, SQLite, Sleepycat, ZODB and SQLObject. 
If you have recently reported a bug marked as fixed, or have a craving for the very latest, you may want the development version instead: http://svn.rdflib.net/trunk#egg=rdflib-dev """, download_url = "http://rdflib.net/rdflib-%s.tar.gz" % __version__, packages = find_packages(exclude=["test"]), ext_modules = [ Extension( name='rdflib.sparql.bison.SPARQLParserc', sources=['src/bison/SPARQLParser.c'], ), ], tests_require = ["nose==0.9.2"], test_suite = 'nose.collector', entry_points = { 'console_scripts': [ 'rdfpipe = rdflib_tools.RDFPipe:main', ], 'nose.plugins': [ 'EARLPlugin = rdflib_tools.EARLPlugin:EARLPlugin', ], }, ) rdflib-2.4.2/ez_setup.py0000644000175000017500000002276411071223037014142 0ustar nachonacho#!python """Bootstrap setuptools installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from ez_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. 
""" import sys DEFAULT_VERSION = "0.6c9" DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3] md5_data = { 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', 'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb', 'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b', 'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a', 'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618', 'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac', 'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5', 'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4', 'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c', 'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b', 'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27', 'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277', 'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa', 'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e', 'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e', 'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f', 'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2', 'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc', 'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167', 'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64', 'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d', 'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20', 'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab', 'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53', 'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2', 'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e', 'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372', 'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902', 
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de', 'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b', 'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03', 'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a', 'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6', 'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a', } import sys, os try: from hashlib import md5 except ImportError: from md5 import md5 def _validate_md5(egg_name, data): if egg_name in md5_data: digest = md5(data).hexdigest() if digest != md5_data[egg_name]: print >>sys.stderr, ( "md5 validation of %s failed! (Possible download problem?)" % egg_name ) sys.exit(2) return data def use_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15 ): """Automatically find/download setuptools and make it available on sys.path `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where setuptools will be downloaded, if it is not already available. If `download_delay` is specified, it should be the number of seconds that will be paused before initiating a download, should one be required. If an older version of setuptools is installed, this routine will print a message to ``sys.stderr`` and raise SystemExit in an attempt to abort the calling script. 
""" was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules def do_download(): egg = download_setuptools(version, download_base, to_dir, download_delay) sys.path.insert(0, egg) import setuptools; setuptools.bootstrap_install_from = egg try: import pkg_resources except ImportError: return do_download() try: pkg_resources.require("setuptools>="+version); return except pkg_resources.VersionConflict, e: if was_imported: print >>sys.stderr, ( "The required version of setuptools (>=%s) is not available, and\n" "can't be installed while this script is running. Please install\n" " a more recent version first, using 'easy_install -U setuptools'." "\n\n(Currently using %r)" ) % (version, e.args[0]) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return do_download() except pkg_resources.DistributionNotFound: return do_download() def download_setuptools( version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay = 15 ): """Download setuptools from a specified location and return its filename `version` should be a valid setuptools version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ import urllib2, shutil egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) url = download_base + egg_name saveto = os.path.join(to_dir, egg_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: from distutils import log if delay: log.warn(""" --------------------------------------------------------------------------- This script requires setuptools version %s to run (even to display help). I will attempt to download it for you (from %s), but you may need to enable firewall access for this script first. I will start the download in %d seconds. 
(Note: if this machine does not have network access, please obtain the file %s and place it in this directory before rerunning this script.) ---------------------------------------------------------------------------""", version, download_base, delay, url ); from time import sleep; sleep(delay) log.warn("Downloading %s", url) src = urllib2.urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = _validate_md5(egg_name, src.read()) dst = open(saveto,"wb"); dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" try: import setuptools except ImportError: egg = None try: egg = download_setuptools(version, delay=0) sys.path.insert(0,egg) from setuptools.command.easy_install import main return main(list(argv)+[egg]) # we're done here finally: if egg and os.path.exists(egg): os.unlink(egg) else: if setuptools.__version__ == '0.0.1': print >>sys.stderr, ( "You have an obsolete version of setuptools installed. Please\n" "remove it from your system entirely before rerunning this script." ) sys.exit(2) req = "setuptools>="+version import pkg_resources try: pkg_resources.require(req) except pkg_resources.VersionConflict: try: from setuptools.command.easy_install import main except ImportError: from easy_install import main main(list(argv)+[download_setuptools(delay=0)]) sys.exit(0) # try to force an exit else: if argv: from setuptools.command.easy_install import main main(argv) else: print "Setuptools version",version,"or greater has been installed." 
print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' def update_md5(filenames): """Update our built-in md5 registry""" import re for name in filenames: base = os.path.basename(name) f = open(name,'rb') md5_data[base] = md5(f.read()).hexdigest() f.close() data = [" %r: %r,\n" % it for it in md5_data.items()] data.sort() repl = "".join(data) import inspect srcfile = inspect.getsourcefile(sys.modules[__name__]) f = open(srcfile, 'rb'); src = f.read(); f.close() match = re.search("\nmd5_data = {\n([^}]+)}", src) if not match: print >>sys.stderr, "Internal error!" sys.exit(2) src = src[:match.start(1)] + repl + src[match.end(1):] f = open(srcfile,'w') f.write(src) f.close() if __name__=='__main__': if len(sys.argv)>2 and sys.argv[1]=='--md5update': update_md5(sys.argv[2:]) else: main(sys.argv[1:]) rdflib-2.4.2/examples/0000755000175000017500000000000011204354476013547 5ustar nachonachordflib-2.4.2/examples/swap_primer.py0000644000175000017500000000604211153616037016450 0ustar nachonacho# http://www.w3.org/2000/10/swap/Primer # This is a simple primer using some of the # example stuff in the above Primer on N3 # get RDFLib at http://rdflib.net/ # Load up RDFLib from rdflib import * # Firstly, it doesn't have to be so complex. # Here we create a "Graph" of our work. # Think of it as a blank piece of graph paper! primer = ConjunctiveGraph() myNS = Namespace('#') primer.add((myNS.pat, myNS.knows, myNS.jo)) # or: primer.add((myNS['pat'], myNS['age'], long(24))) # Now, with just that, lets see how the system # recorded *way* too many details about what # you just asserted as fact. # from pprint import pprint pprint(list(primer)) # just think .whatever((s, p, o)) # here we report on what we know pprint(list(primer.subjects())) pprint(list(primer.predicates())) pprint(list(primer.objects())) # and other things that make sense # what do we know about pat? pprint(list(primer.predicate_objects(myNS.pat))) # who is what age? 
pprint(list(primer.subject_objects(myNS.age))) # Okay, so lets now work with a bigger # dataset from the example, and start # with a fresh new graph. primer = ConjunctiveGraph() # Lets start with a verbatim string straight from the primer text: mySource = """ @prefix : . @prefix rdf: . @prefix rdfs: . @prefix owl: . @prefix dc: . @prefix foo: . @prefix swap: . <> dc:title "Primer - Getting into the Semantic Web and RDF using N3". <#pat> <#knows> <#jo> . <#pat> <#age> 24 . <#al> is <#child> of <#pat> . <#pat> <#child> <#al>, <#chaz>, <#mo> ; <#age> 24 ; <#eyecolor> "blue" . :Person a rdfs:Class. :Pat a :Person. :Woman a rdfs:Class; rdfs:subClassOf :Person . :sister a rdf:Property. :sister rdfs:domain :Person; rdfs:range :Woman. :Woman = foo:FemaleAdult . :Title a rdf:Property; = dc:title . """ # --- End of primer code # To make this go easier to spit back out... # technically, we already created a namespace # with the object init (and it added some namespaces as well) # By default, your main namespace is the URI of your # current working directory, so lets make that simpler: myNS = Namespace(URIRef('http://www.w3.org/2000/10/swap/Primer#')) primer.bind('', myNS) primer.bind('owl', 'http://www.w3.org/2002/07/owl#') primer.bind('dc', 'http://purl.org/dc/elements/1.1/') primer.bind('swap', 'http://www.w3.org/2000/10/swap/') sourceCode = StringInputSource(mySource, myNS) # Lets load it up! 
primer.parse(sourceCode, format='n3') # Now you can query, either directly straight into a list: [(x, y, z) for x, y, z in primer] # or spit it back out (mostly) the way we created it: print primer.serialize(format='n3') # for more insight into things already done, lets see the namespaces list(primer.namespaces()) # lets ask something about the data list(primer.objects(myNS.pat, myNS.child)) rdflib-2.4.2/examples/example.py0000644000175000017500000000330011153616037015545 0ustar nachonachoimport logging # Configure how we want rdflib logger to log messages _logger = logging.getLogger("rdflib") _logger.setLevel(logging.DEBUG) _hdlr = logging.StreamHandler() _hdlr.setFormatter(logging.Formatter('%(name)s %(levelname)s: %(message)s')) _logger.addHandler(_hdlr) from rdflib.Graph import Graph from rdflib import URIRef, Literal, BNode, Namespace from rdflib import RDF store = Graph() # Bind a few prefix, namespace pairs. store.bind("dc", "http://http://purl.org/dc/elements/1.1/") store.bind("foaf", "http://xmlns.com/foaf/0.1/") # Create a namespace object for the Friend of a friend namespace. FOAF = Namespace("http://xmlns.com/foaf/0.1/") # Create an identifier to use as the subject for Donna. donna = BNode() # Add triples using store's add method. store.add((donna, RDF.type, FOAF["Person"])) store.add((donna, FOAF["nick"], Literal("donna", lang="foo"))) store.add((donna, FOAF["name"], Literal("Donna Fales"))) # Iterate over triples in store and print them out. print "--- printing raw triples ---" for s, p, o in store: print s, p, o # For each foaf:Person in the store print out its mbox property. print "--- printing mboxes ---" for person in store.subjects(RDF.type, FOAF["Person"]): for mbox in store.objects(person, FOAF["mbox"]): print mbox # Serialize the store as RDF/XML to the file foaf.rdf. 
store.serialize("foaf.rdf", format="pretty-xml", max_depth=3) # Let's show off the serializers print "RDF Serializations:" # Serialize as XML print "--- start: rdf-xml ---" print store.serialize(format="pretty-xml") print "--- end: rdf-xml ---\n" # Serialize as NTriples print "--- start: ntriples ---" print store.serialize(format="nt") print "--- end: ntriples ---\n"